// tutorial-visp-pioneer-visual-servo.cpp
// (Doxygen source listing of this tutorial; see the rendered ViSP/visp_ros
// documentation page for the annotated version.)
21 #include <iostream>
22 
23 #include <visp/vpRobotPioneer.h>
24 #include <visp/vpCameraParameters.h>
25 #include <visp/vpDisplayX.h>
26 #include <visp/vpDot2.h>
27 #include <visp/vpFeatureBuilder.h>
28 #include <visp/vpFeatureDepth.h>
29 #include <visp/vpFeaturePoint.h>
30 #include <visp/vpHomogeneousMatrix.h>
31 #include <visp/vpImage.h>
32 #include <visp/vpImageConvert.h>
33 #include <visp/vp1394TwoGrabber.h>
34 #include <visp/vpServo.h>
35 #include <visp/vpVelocityTwistMatrix.h>
36 
// The demo needs all three third-party capabilities at once:
//  - libdc1394-2.x to grab images from the firewire camera (vp1394TwoGrabber),
//  - X11 to display them (vpDisplayX),
//  - Aria/Pioneer support to drive the robot (vpRobotPioneer).
// FIX: the guard previously read `A && B || C`, which — because `&&` binds
// tighter than `||` — enabled the build when only the Pioneer support was
// present and then failed to compile the grabber/display code.
#if defined(VISP_HAVE_DC1394_2) && defined(VISP_HAVE_X11) && defined(VISP_HAVE_PIONEER)
#  define TEST_COULD_BE_ACHIEVED
#endif
40 
41 
42 #ifdef TEST_COULD_BE_ACHIEVED
43 int main(int argc, char **argv)
44 {
45  try {
46  vpImage<unsigned char> I; // Create a gray level image container
47  double depth = 1.;
48  double lambda = 0.6;
49  double coef = 1./6.77; // Scale parameter used to estimate the depth Z of the blob from its surface
50 
51  vpRobotPioneer robot;
52  ArArgumentParser parser(&argc, argv);
53  parser.loadDefaultArguments();
54 
55  // ArRobotConnector connects to the robot, get some initial data from it such as type and name,
56  // and then loads parameter files for this robot.
57  ArRobotConnector robotConnector(&parser, &robot);
58  if(!robotConnector.connectRobot())
59  {
60  ArLog::log(ArLog::Terse, "Could not connect to the robot.");
61  if(parser.checkHelpAndWarnUnparsed())
62  {
63  Aria::logOptions();
64  Aria::exit(1);
65  }
66  }
67  if (!Aria::parseArgs())
68  {
69  Aria::logOptions();
70  Aria::shutdown();
71  return false;
72  }
73 
74  // Wait 3 sec to be sure that the low level Aria thread used to control
75  // the robot is started. Without this delay we experienced a delay (arround 2.2 sec)
76  // between the velocity send to the robot and the velocity that is really applied
77  // to the wheels.
78  vpTime::sleepMs(3000);
79 
80  std::cout << "Robot connected" << std::endl;
81 
82  // Camera parameters. In this experiment we don't need a precise calibration of the camera
83  vpCameraParameters cam;
84 
85  // Create a grabber based on libdc1394-2.x third party lib (for firewire cameras under Linux)
86  vp1394TwoGrabber g(false);
87  g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
88  g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_30);
89  // AVT Pike 032C parameters
90  cam.initPersProjWithoutDistortion(800, 795, 320, 216);
91 
92  g.acquire(I);
93 
94  // Create an image viewer
95  vpDisplayX d(I, 10, 10, "Current frame");
96  vpDisplay::display(I);
97  vpDisplay::flush(I);
98 
99  // Create a blob tracker
100  vpDot2 dot;
101  dot.setGraphics(true);
102  dot.setComputeMoments(true);
103  dot.setEllipsoidShapePrecision(0.); // to track a blob without any constraint on the shape
104  dot.setGrayLevelPrecision(0.9); // to set the blob gray level bounds for binarisation
105  dot.setEllipsoidBadPointsPercentage(0.5); // to be accept 50% of bad inner and outside points with bad gray level
106  dot.initTracking(I);
107  vpDisplay::flush(I);
108 
109  vpServo task;
110  task.setServo(vpServo::EYEINHAND_L_cVe_eJe) ;
111  task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE) ;
112  task.setLambda(lambda) ;
113  vpVelocityTwistMatrix cVe ;
114  cVe = robot.get_cVe() ;
115  task.set_cVe(cVe) ;
116 
117  std::cout << "cVe: \n" << cVe << std::endl;
118 
119  vpMatrix eJe;
120  robot.get_eJe(eJe) ;
121  task.set_eJe(eJe) ;
122  std::cout << "eJe: \n" << eJe << std::endl;
123 
124  // Current and desired visual feature associated to the x coordinate of the point
125  vpFeaturePoint s_x, s_xd;
126 
127  // Create the current x visual feature
128  vpFeatureBuilder::create(s_x, cam, dot);
129 
130  // Create the desired x* visual feature
131  s_xd.buildFrom(0, 0, depth);
132 
133  // Add the feature
134  task.addFeature(s_x, s_xd) ;
135 
136  // Create the current log(Z/Z*) visual feature
137  vpFeatureDepth s_Z, s_Zd;
138  // Surface of the blob estimated from the image moment m00 and converted in meters
139  double surface = 1./sqrt(dot.m00/(cam.get_px()*cam.get_py()));
140  double Z, Zd;
141  // Initial depth of the blob in from of the camera
142  Z = coef * surface ;
143  // Desired depth Z* of the blob. This depth is learned and equal to the initial depth
144  Zd = Z;
145 
146  std::cout << "Z " << Z << std::endl;
147  s_Z.buildFrom(s_x.get_x(), s_x.get_y(), Z , 0); // log(Z/Z*) = 0 that's why the last parameter is 0
148  s_Zd.buildFrom(s_x.get_x(), s_x.get_y(), Zd , 0); // log(Z/Z*) = 0 that's why the last parameter is 0
149 
150  // Add the feature
151  task.addFeature(s_Z, s_Zd) ;
152 
153  vpColVector v; // vz, wx
154 
155  while(1)
156  {
157  // Acquire a new image
158  g.acquire(I);
159 
160  // Set the image as background of the viewer
161  vpDisplay::display(I);
162 
163  // Does the blob tracking
164  dot.track(I);
165  // Update the current x feature
166  vpFeatureBuilder::create(s_x, cam, dot);
167 
168  // Update log(Z/Z*) feature. Since the depth Z change, we need to update the intection matrix
169  surface = 1./sqrt(dot.m00/(cam.get_px()*cam.get_py()));
170  Z = coef * surface ;
171  s_Z.buildFrom(s_x.get_x(), s_x.get_y(), Z, log(Z/Zd)) ;
172 
173  robot.get_cVe(cVe) ;
174  task.set_cVe(cVe) ;
175 
176  robot.get_eJe(eJe) ;
177  task.set_eJe(eJe) ;
178 
179  // Compute the control law. Velocities are computed in the mobile robot reference frame
180  v = task.computeControlLaw() ;
181 
182  std::cout << "Send velocity to the pionner: " << v[0] << " m/s "
183  << vpMath::deg(v[1]) << " deg/s" << std::endl;
184 
185  // Send the velocity to the robot
186  robot.setVelocity(vpRobot::REFERENCE_FRAME, v);
187 
188  // Draw a vertical line which corresponds to the desired x coordinate of the dot cog
189  vpDisplay::displayLine(I, 0, 320, 479, 320, vpColor::red);
190  vpDisplay::flush(I);
191 
192  // A click in the viewer to exit
193  if ( vpDisplay::getClick(I, false) )
194  break;
195  }
196 
197  std::cout << "Ending robot thread..." << std::endl;
198  robot.stopRunning();
199 
200  // wait for the thread to stop
201  robot.waitForRunExit();
202 
203  // Kill the servo task
204  task.print() ;
205  task.kill();
206  }
207  catch(vpException e) {
208  std::cout << "Catch an exception: " << e << std::endl;
209  return 1;
210  }
211 }
212 #else
// Fallback entry point compiled when one of the required third-party
// libraries (libdc1394-2.x, X11, Aria/Pioneer) is missing: just tell the
// user why the demo cannot run.
int main()
{
  const char *missing_libs_msg =
      "You don't have the right 3rd party libraries to run this example...";
  std::cout << missing_libs_msg << std::endl;
}
217 #endif
// visp_ros
// Author(s): Francois Pasteau, Fabien Spindler, Gatien Gaumerais
// autogenerated on Tue Feb 9 2021 03:40:20