tutorial-visp-pioneer-visual-servo.cpp
Go to the documentation of this file.
1 
21 #include <iostream>
22 
23 #include <visp/vp1394TwoGrabber.h>
24 #include <visp/vpCameraParameters.h>
25 #include <visp/vpDisplayX.h>
26 #include <visp/vpDot2.h>
27 #include <visp/vpFeatureBuilder.h>
28 #include <visp/vpFeatureDepth.h>
29 #include <visp/vpFeaturePoint.h>
30 #include <visp/vpHomogeneousMatrix.h>
31 #include <visp/vpImage.h>
32 #include <visp/vpImageConvert.h>
33 #include <visp/vpRobotPioneer.h>
34 #include <visp/vpServo.h>
35 #include <visp/vpVelocityTwistMatrix.h>
36 
// This example needs ALL THREE third parties: a firewire grabber
// (libdc1394-2.x), an X11 display, and the Aria lib to drive the Pioneer.
// NOTE: '&&' binds tighter than '||', so the former unparenthesized
// 'A && B || C' enabled the build with only the Pioneer lib present,
// which then failed to compile (vp1394TwoGrabber/vpDisplayX unavailable).
#if defined( VISP_HAVE_DC1394_2 ) && defined( VISP_HAVE_X11 ) && defined( VISP_HAVE_PIONEER )
#define TEST_COULD_BE_ACHIEVED
#endif
40 
41 #ifdef TEST_COULD_BE_ACHIEVED
42 int
43 main( int argc, char **argv )
44 {
45  try
46  {
47  vpImage< unsigned char > I; // Create a gray level image container
48  double depth = 1.;
49  double lambda = 0.6;
50  double coef = 1. / 6.77; // Scale parameter used to estimate the depth Z of the blob from its surface
51 
52  vpRobotPioneer robot;
53  ArArgumentParser parser( &argc, argv );
54  parser.loadDefaultArguments();
55 
56  // ArRobotConnector connects to the robot, get some initial data from it such as type and name,
57  // and then loads parameter files for this robot.
58  ArRobotConnector robotConnector( &parser, &robot );
59  if ( !robotConnector.connectRobot() )
60  {
61  ArLog::log( ArLog::Terse, "Could not connect to the robot." );
62  if ( parser.checkHelpAndWarnUnparsed() )
63  {
64  Aria::logOptions();
65  Aria::exit( 1 );
66  }
67  }
68  if ( !Aria::parseArgs() )
69  {
70  Aria::logOptions();
71  Aria::shutdown();
72  return false;
73  }
74 
75  // Wait 3 sec to be sure that the low level Aria thread used to control
76  // the robot is started. Without this delay we experienced a delay (arround 2.2 sec)
77  // between the velocity send to the robot and the velocity that is really applied
78  // to the wheels.
79  vpTime::sleepMs( 3000 );
80 
81  std::cout << "Robot connected" << std::endl;
82 
83  // Camera parameters. In this experiment we don't need a precise calibration of the camera
84  vpCameraParameters cam;
85 
86  // Create a grabber based on libdc1394-2.x third party lib (for firewire cameras under Linux)
87  vp1394TwoGrabber g( false );
88  g.setVideoMode( vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8 );
89  g.setFramerate( vp1394TwoGrabber::vpFRAMERATE_30 );
90  // AVT Pike 032C parameters
91  cam.initPersProjWithoutDistortion( 800, 795, 320, 216 );
92 
93  g.acquire( I );
94 
95  // Create an image viewer
96  vpDisplayX d( I, 10, 10, "Current frame" );
97  vpDisplay::display( I );
98  vpDisplay::flush( I );
99 
100  // Create a blob tracker
101  vpDot2 dot;
102  dot.setGraphics( true );
103  dot.setComputeMoments( true );
104  dot.setEllipsoidShapePrecision( 0. ); // to track a blob without any constraint on the shape
105  dot.setGrayLevelPrecision( 0.9 ); // to set the blob gray level bounds for binarisation
106  dot.setEllipsoidBadPointsPercentage( 0.5 ); // to be accept 50% of bad inner and outside points with bad gray level
107  dot.initTracking( I );
108  vpDisplay::flush( I );
109 
110  vpServo task;
111  task.setServo( vpServo::EYEINHAND_L_cVe_eJe );
112  task.setInteractionMatrixType( vpServo::DESIRED, vpServo::PSEUDO_INVERSE );
113  task.setLambda( lambda );
114  vpVelocityTwistMatrix cVe;
115  cVe = robot.get_cVe();
116  task.set_cVe( cVe );
117 
118  std::cout << "cVe: \n" << cVe << std::endl;
119 
120  vpMatrix eJe;
121  robot.get_eJe( eJe );
122  task.set_eJe( eJe );
123  std::cout << "eJe: \n" << eJe << std::endl;
124 
125  // Current and desired visual feature associated to the x coordinate of the point
126  vpFeaturePoint s_x, s_xd;
127 
128  // Create the current x visual feature
129  vpFeatureBuilder::create( s_x, cam, dot );
130 
131  // Create the desired x* visual feature
132  s_xd.buildFrom( 0, 0, depth );
133 
134  // Add the feature
135  task.addFeature( s_x, s_xd );
136 
137  // Create the current log(Z/Z*) visual feature
138  vpFeatureDepth s_Z, s_Zd;
139  // Surface of the blob estimated from the image moment m00 and converted in meters
140  double surface = 1. / sqrt( dot.m00 / ( cam.get_px() * cam.get_py() ) );
141  double Z, Zd;
142  // Initial depth of the blob in from of the camera
143  Z = coef * surface;
144  // Desired depth Z* of the blob. This depth is learned and equal to the initial depth
145  Zd = Z;
146 
147  std::cout << "Z " << Z << std::endl;
148  s_Z.buildFrom( s_x.get_x(), s_x.get_y(), Z, 0 ); // log(Z/Z*) = 0 that's why the last parameter is 0
149  s_Zd.buildFrom( s_x.get_x(), s_x.get_y(), Zd, 0 ); // log(Z/Z*) = 0 that's why the last parameter is 0
150 
151  // Add the feature
152  task.addFeature( s_Z, s_Zd );
153 
154  vpColVector v; // vz, wx
155 
156  while ( 1 )
157  {
158  // Acquire a new image
159  g.acquire( I );
160 
161  // Set the image as background of the viewer
162  vpDisplay::display( I );
163 
164  // Does the blob tracking
165  dot.track( I );
166  // Update the current x feature
167  vpFeatureBuilder::create( s_x, cam, dot );
168 
169  // Update log(Z/Z*) feature. Since the depth Z change, we need to update the intection matrix
170  surface = 1. / sqrt( dot.m00 / ( cam.get_px() * cam.get_py() ) );
171  Z = coef * surface;
172  s_Z.buildFrom( s_x.get_x(), s_x.get_y(), Z, log( Z / Zd ) );
173 
174  robot.get_cVe( cVe );
175  task.set_cVe( cVe );
176 
177  robot.get_eJe( eJe );
178  task.set_eJe( eJe );
179 
180  // Compute the control law. Velocities are computed in the mobile robot reference frame
181  v = task.computeControlLaw();
182 
183  std::cout << "Send velocity to the pionner: " << v[0] << " m/s " << vpMath::deg( v[1] ) << " deg/s" << std::endl;
184 
185  // Send the velocity to the robot
186  robot.setVelocity( vpRobot::REFERENCE_FRAME, v );
187 
188  // Draw a vertical line which corresponds to the desired x coordinate of the dot cog
189  vpDisplay::displayLine( I, 0, 320, 479, 320, vpColor::red );
190  vpDisplay::flush( I );
191 
192  // A click in the viewer to exit
193  if ( vpDisplay::getClick( I, false ) )
194  break;
195  }
196 
197  std::cout << "Ending robot thread..." << std::endl;
198  robot.stopRunning();
199 
200  // wait for the thread to stop
201  robot.waitForRunExit();
202 
203  // Kill the servo task
204  task.print();
205  task.kill();
206  }
207  catch ( vpException e )
208  {
209  std::cout << "Catch an exception: " << e << std::endl;
210  return 1;
211  }
212 }
213 #else
/*!
  Fallback entry point, built when the required third parties (firewire
  grabber, X11 display, Aria/Pioneer lib) are not all available: it only
  reports that the example cannot run.
*/
int
main()
{
  std::cout << "You don't have the right 3rd party libraries to run this example..." << std::endl;
  return 0; // explicit, identical to the former implicit success return
}
219 #endif
main
int main(int argc, char **argv)
Definition: tutorial-visp-pioneer-visual-servo.cpp:43
parser
dot
TFSIMD_FORCE_INLINE tfScalar dot(const Quaternion &q1, const Quaternion &q2)
d
d


visp_ros
Author(s): Francois Pasteau, Fabien Spindler, Gatien Gaumerais, Alexander Oliva
autogenerated on Wed Mar 2 2022 01:13:33