tutorial-franka-real-pbvs-apriltag.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2021 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 *****************************************************************************/

#include <iostream>

#include <visp3/core/vpCameraParameters.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/visual_features/vpFeatureThetaU.h>
#include <visp3/visual_features/vpFeatureTranslation.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>

#include <visp3/robot/vpRobotFranka.h>
#include <visp3/sensor/vpRealSense2.h>

#if defined( VISP_HAVE_REALSENSE2 ) && ( VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11 ) && \
    ( defined( VISP_HAVE_OPENCV ) ) && defined( VISP_HAVE_FRANKA )

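// Draw in overlay the image trajectory of a set of points: a new point is appended to its
// trajectory only when it has moved by more than one pixel, and each trajectory is then
// displayed as a green polyline.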
void display_point_trajectory( const vpImage< unsigned char > &I, const std::vector< vpImagePoint > &vip,
                               std::vector< vpImagePoint > *traj_vip )
{
  for ( size_t i = 0; i < vip.size(); i++ )
  {
    if ( traj_vip[i].size() )
    {
      // Add the point only if its distance to the previous one is larger than 1 pixel
      if ( vpImagePoint::distance( vip[i], traj_vip[i].back() ) > 1. )
      {
        traj_vip[i].push_back( vip[i] );
      }
    }
    else
    {
      traj_vip[i].push_back( vip[i] );
    }
  }
  for ( size_t i = 0; i < vip.size(); i++ )
  {
    for ( size_t j = 1; j < traj_vip[i].size(); j++ )
    {
      vpDisplay::displayLine( I, traj_vip[i][j - 1], traj_vip[i][j], vpColor::green, 2 );
    }
  }
}

int main( int argc, char **argv )
{
  double opt_tagSize = 0.08;
  std::string opt_robot_ip = "192.168.1.1";
  std::string opt_eMc_filename = "";
  bool display_tag = true;
  int opt_quad_decimate = 2;
  bool opt_verbose = false;
  bool opt_plot = false;
  bool opt_adaptive_gain = false;
  bool opt_task_sequencing = false;
  double convergence_threshold_t = 0.0005, convergence_threshold_tu = vpMath::rad( 0.5 );

  for ( int i = 1; i < argc; i++ )
  {
    if ( std::string( argv[i] ) == "--tag_size" && i + 1 < argc )
    {
      opt_tagSize = std::stod( argv[i + 1] );
    }
    else if ( std::string( argv[i] ) == "--ip" && i + 1 < argc )
    {
      opt_robot_ip = std::string( argv[i + 1] );
    }
    else if ( std::string( argv[i] ) == "--eMc" && i + 1 < argc )
    {
      opt_eMc_filename = std::string( argv[i + 1] );
    }
    else if ( std::string( argv[i] ) == "--verbose" )
    {
      opt_verbose = true;
    }
    else if ( std::string( argv[i] ) == "--plot" )
    {
      opt_plot = true;
    }
    else if ( std::string( argv[i] ) == "--adaptive_gain" )
    {
      opt_adaptive_gain = true;
    }
    else if ( std::string( argv[i] ) == "--task_sequencing" )
    {
      opt_task_sequencing = true;
    }
    else if ( std::string( argv[i] ) == "--quad_decimate" && i + 1 < argc )
    {
      opt_quad_decimate = std::stoi( argv[i + 1] );
    }
    else if ( std::string( argv[i] ) == "--no-convergence-threshold" )
    {
      convergence_threshold_t  = 0.;
      convergence_threshold_tu = 0.;
    }
    else if ( std::string( argv[i] ) == "--help" || std::string( argv[i] ) == "-h" )
    {
      std::cout
          << argv[0] << " [--ip <default " << opt_robot_ip << ">] "
          << "[--tag_size <marker size in meters; default " << opt_tagSize << ">] [--eMc <eMc extrinsic file>] "
          << "[--quad_decimate <decimation; default " << opt_quad_decimate
          << ">] [--adaptive_gain] [--plot] [--task_sequencing] [--no-convergence-threshold] [--verbose] [--help] [-h]"
          << "\n";
      return EXIT_SUCCESS;
    }
  }

  vpRobotFranka robot;

  try
  {
    std::cout << "Try to connect to robot ip: " << opt_robot_ip << std::endl;
    robot.connect( opt_robot_ip );
    std::cout << "Panda robot connected" << std::endl;

    vpRealSense2 rs;
    rs2::config config;
    unsigned int width = 640, height = 480;
    config.enable_stream( RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30 );
    config.enable_stream( RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30 );
    config.enable_stream( RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30 );
    std::cout << "Try to connect to Realsense camera " << std::endl;
    rs.open( config );
    std::cout << "Camera connected" << std::endl;

    // Get camera extrinsics
    vpPoseVector ePc;
    // Set camera extrinsics default values corresponding to
    // - an Intel RealSense D435 camera
    // - attached to the Franka end-effector using the mechanical interface corresponding to
    //   the cad model given in franka-rs-D435-camera-holder.stl file
    ePc[0] = 0.0564668;
    ePc[1] = -0.0375079;
    ePc[2] = -0.150416;
    ePc[3] = 0.0102548;
    ePc[4] = -0.0012236;
    ePc[5] = 1.5412;

    // If provided, read camera extrinsics from --eMc <file>
    if ( !opt_eMc_filename.empty() )
    {
      ePc.loadYAML( opt_eMc_filename, ePc );
    }
    else
    {
      std::cout << "Warning, no eMc extrinsic file provided. Using hard-coded default values."
                << "\n";
    }
    vpHomogeneousMatrix eMc( ePc );
    std::cout << "eMc:\n" << eMc << "\n";
    std::cout << "ePc:\n" << vpPoseVector( eMc ).t() << "\n";
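    // eMc is the constant transform from the end-effector frame to the camera frame. It is
    // passed below to robot.set_eMc() so that the velocity twist computed in the camera frame
    // can be converted by the robot controller into joint velocities.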

    // Get camera intrinsics
    vpCameraParameters cam =
        rs.getCameraParameters( RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithDistortion );
    std::cout << "cam:\n" << cam << "\n";

    vpImage< unsigned char > I( height, width );

    vpDisplayOpenCV dc( I, 10, 10, "Color image" );

    vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;
    vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::HOMOGRAPHY_VIRTUAL_VS;
    // vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::BEST_RESIDUAL_VIRTUAL_VS;
    vpDetectorAprilTag detector( tagFamily );
    detector.setAprilTagPoseEstimationMethod( poseEstimationMethod );
    detector.setDisplayTag( display_tag );
    detector.setAprilTagQuadDecimate( opt_quad_decimate );

    // Servo
    vpHomogeneousMatrix cdMc, cMo, oMo;

    // Desired pose to reach
    vpHomogeneousMatrix cdMo( vpTranslationVector( 0, 0, opt_tagSize * 3 ), // 3 times the tag size along camera z axis
                              vpRotationMatrix( { 1, 0, 0, 0, -1, 0, 0, 0, -1 } ) );
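    // In this desired pose the tag center lies on the optical axis, 3 tag sizes in front of
    // the camera, and the chosen rotation (tag x axis aligned with camera x, y and z axes
    // opposed) makes the camera face the tag plane.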

    cdMc = cdMo * cMo.inverse();
    vpFeatureTranslation t( vpFeatureTranslation::cdMc );
    vpFeatureThetaU tu( vpFeatureThetaU::cdRc );
    t.buildFrom( cdMc );
    tu.buildFrom( cdMc );

    vpFeatureTranslation td( vpFeatureTranslation::cdMc );
    vpFeatureThetaU tud( vpFeatureThetaU::cdRc );

    vpServo task;
    task.addFeature( t, td );
    task.addFeature( tu, tud );
    task.setServo( vpServo::EYEINHAND_CAMERA );
    task.setInteractionMatrixType( vpServo::CURRENT );
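    // With these two features the task implements the classical PBVS scheme: the error
    // e = ( c*t_c, theta u ) is computed from cdMc at each iteration, and the control law
    // v_c = -lambda * L^+ * e gives the velocity of the camera (eye-in-hand configuration).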

    if ( opt_adaptive_gain )
    {
      vpAdaptiveGain lambda( 1.5, 0.4, 30 ); // lambda(0)=1.5, lambda(oo)=0.4 and lambda'(0)=30
      task.setLambda( lambda );
    }
    else
    {
      task.setLambda( 0.5 );
    }

    vpPlot *plotter = nullptr;
    int iter_plot = 0;

    if ( opt_plot )
    {
      plotter = new vpPlot( 2, static_cast< int >( 250 * 2 ), 500, static_cast< int >( I.getWidth() ) + 80, 10,
                            "Real time curves plotter" );
      plotter->setTitle( 0, "Visual features error" );
      plotter->setTitle( 1, "Camera velocities" );
      plotter->initGraph( 0, 6 );
      plotter->initGraph( 1, 6 );
      plotter->setLegend( 0, 0, "error_feat_tx" );
      plotter->setLegend( 0, 1, "error_feat_ty" );
      plotter->setLegend( 0, 2, "error_feat_tz" );
      plotter->setLegend( 0, 3, "error_feat_theta_ux" );
      plotter->setLegend( 0, 4, "error_feat_theta_uy" );
      plotter->setLegend( 0, 5, "error_feat_theta_uz" );
      plotter->setLegend( 1, 0, "vc_x" );
      plotter->setLegend( 1, 1, "vc_y" );
      plotter->setLegend( 1, 2, "vc_z" );
      plotter->setLegend( 1, 3, "wc_x" );
      plotter->setLegend( 1, 4, "wc_y" );
      plotter->setLegend( 1, 5, "wc_z" );
    }

    bool final_quit = false;
    bool has_converged = false;
    bool send_velocities = false;
    bool servo_started = false;
    std::vector< vpImagePoint > *traj_vip = nullptr; // To memorize point trajectory

    static double t_init_servo = vpTime::measureTimeMs();

    robot.set_eMc( eMc ); // Set location of the camera wrt end-effector frame
    robot.setRobotState( vpRobot::STATE_VELOCITY_CONTROL );

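    // Servo loop: acquire an image, detect the tag, update the visual features from the
    // current pose error cdMc, then send the resulting camera velocity to the robot. The
    // loop stops when the error falls below the convergence thresholds or on a right click.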
    while ( !has_converged && !final_quit )
    {
      double t_start = vpTime::measureTimeMs();

      rs.acquire( I );

      vpDisplay::display( I );

      std::vector< vpHomogeneousMatrix > cMo_vec;
      detector.detect( I, opt_tagSize, cam, cMo_vec );

      std::stringstream ss;
      ss << "Left click to " << ( send_velocities ? "stop the robot" : "servo the robot" ) << ", right click to quit.";
      vpDisplay::displayText( I, 20, 20, ss.str(), vpColor::red );

      vpColVector v_c( 6 );

      // Only one tag is detected
      if ( cMo_vec.size() == 1 )
      {
        cMo = cMo_vec[0];

        static bool first_time = true;
        if ( first_time )
        {
          // Introduce a security margin wrt the tag positioning in order to avoid a PI rotation of the camera
          std::vector< vpHomogeneousMatrix > v_oMo( 2 ), v_cdMc( 2 );
          v_oMo[1].buildFrom( 0, 0, 0, 0, 0, M_PI );
          for ( size_t i = 0; i < 2; i++ )
          {
            v_cdMc[i] = cdMo * v_oMo[i] * cMo.inverse();
          }
          if ( std::fabs( v_cdMc[0].getThetaUVector().getTheta() ) <
               std::fabs( v_cdMc[1].getThetaUVector().getTheta() ) )
          {
            oMo = v_oMo[0];
          }
          else
          {
            std::cout << "Desired frame modified to avoid PI rotation of the camera" << std::endl;
            oMo = v_oMo[1]; // Introduce PI rotation
          }
        }
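        // oMo now holds the tag frame correction (identity, or a PI rotation about the tag z
        // axis) that leads to the smallest camera rotation, so the camera never has to turn
        // half a revolution around its optical axis to reach the desired pose.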

        // Update visual features
        cdMc = cdMo * oMo * cMo.inverse();
        t.buildFrom( cdMc );
        tu.buildFrom( cdMc );

        if ( opt_task_sequencing )
        {
          if ( !servo_started )
          {
            if ( send_velocities )
            {
              servo_started = true;
            }
            t_init_servo = vpTime::measureTimeMs();
          }
          v_c = task.computeControlLaw( ( vpTime::measureTimeMs() - t_init_servo ) / 1000. );
        }
        else
        {
          v_c = task.computeControlLaw();
        }
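        // When --task_sequencing is used, computeControlLaw() is called with the time elapsed
        // since the servo actually started; the task is then sequenced so that the velocity
        // ramps up smoothly from zero instead of jumping to -lambda * L^+ * e at start-up.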

        // Display desired and current pose features
        vpDisplay::displayFrame( I, cdMo * oMo, cam, opt_tagSize / 1.5, vpColor::yellow, 2 );
        vpDisplay::displayFrame( I, cMo, cam, opt_tagSize / 2, vpColor::none, 3 );
        // Get tag corners
        std::vector< vpImagePoint > vip = detector.getPolygon( 0 );
        // Get the tag cog corresponding to the projection of the tag frame in the image
        vip.push_back( detector.getCog( 0 ) );
        // Display the trajectory of the points
        if ( first_time )
        {
          traj_vip = new std::vector< vpImagePoint >[vip.size()];
        }
        display_point_trajectory( I, vip, traj_vip );

        if ( opt_plot )
        {
          plotter->plot( 0, iter_plot, task.getError() );
          plotter->plot( 1, iter_plot, v_c );
          iter_plot++;
        }

        if ( opt_verbose )
        {
          std::cout << "v_c: " << v_c.t() << std::endl;
        }

        vpTranslationVector cd_t_c = cdMc.getTranslationVector();
        vpThetaUVector cd_tu_c = cdMc.getThetaUVector();
        double error_tr = sqrt( cd_t_c.sumSquare() );
        double error_tu = vpMath::deg( sqrt( cd_tu_c.sumSquare() ) );

        ss.str( "" );
        ss << "error_t: " << error_tr;
        vpDisplay::displayText( I, 20, static_cast< int >( I.getWidth() ) - 150, ss.str(), vpColor::red );
        ss.str( "" );
        ss << "error_tu: " << error_tu;
        vpDisplay::displayText( I, 40, static_cast< int >( I.getWidth() ) - 150, ss.str(), vpColor::red );

        if ( opt_verbose )
          std::cout << "error translation: " << error_tr << " ; error rotation: " << error_tu << std::endl;

        if ( error_tr < convergence_threshold_t && error_tu < convergence_threshold_tu )
        {
          has_converged = true;
          std::cout << "Servo task has converged" << std::endl;
          vpDisplay::displayText( I, 100, 20, "Servo task has converged", vpColor::red );
        }

        if ( first_time )
        {
          first_time = false;
        }
      } // end if (cMo_vec.size() == 1)
      else
      {
        v_c = 0;
      }

      if ( !send_velocities )
      {
        v_c = 0;
      }

      // Send to the robot
      robot.setVelocity( vpRobot::CAMERA_FRAME, v_c );

      ss.str( "" );
      ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
      vpDisplay::displayText( I, 40, 20, ss.str(), vpColor::red );
      vpDisplay::flush( I );

      vpMouseButton::vpMouseButtonType button;
      if ( vpDisplay::getClick( I, button, false ) )
      {
        switch ( button )
        {
        case vpMouseButton::button1:
          send_velocities = !send_velocities;
          break;

        case vpMouseButton::button3:
          final_quit = true;
          v_c = 0;
          break;

        default:
          break;
        }
      }
    }
    std::cout << "Stop the robot " << std::endl;
    robot.setRobotState( vpRobot::STATE_STOP );

    if ( opt_plot && plotter != nullptr )
    {
      delete plotter;
      plotter = nullptr;
    }

    if ( !final_quit )
    {
      while ( !final_quit )
      {
        rs.acquire( I );
        vpDisplay::display( I );

        vpDisplay::displayText( I, 20, 20, "Click to quit the program.", vpColor::red );
        vpDisplay::displayText( I, 40, 20, "Visual servo converged.", vpColor::red );

        if ( vpDisplay::getClick( I, false ) )
        {
          final_quit = true;
        }

        vpDisplay::flush( I );
      }
    }

    if ( traj_vip )
    {
      delete[] traj_vip;
    }
  }
  catch ( const vpException &e )
  {
    std::cout << "ViSP exception: " << e.what() << std::endl;
    std::cout << "Stop the robot " << std::endl;
    robot.setRobotState( vpRobot::STATE_STOP );
    return EXIT_FAILURE;
  }
  catch ( const franka::NetworkException &e )
  {
    std::cout << "Franka network exception: " << e.what() << std::endl;
    std::cout << "Check that you are connected to the Franka robot"
              << " and that you specified the right IP using the --ip command line option (default 192.168.1.1)."
              << std::endl;
    return EXIT_FAILURE;
  }
  catch ( const std::exception &e )
  {
    std::cout << "Franka exception: " << e.what() << std::endl;
    return EXIT_FAILURE;
  }

  return EXIT_SUCCESS;
}
#else
int main()
{
#if !defined( VISP_HAVE_REALSENSE2 )
  std::cout << "Install librealsense-2.x" << std::endl;
#endif
#if ( VISP_CXX_STANDARD < VISP_CXX_STANDARD_11 )
  std::cout << "Build ViSP with c++11 or higher compiler flag (cmake -DUSE_CXX_STANDARD=11)." << std::endl;
#endif
#if !defined( VISP_HAVE_FRANKA )
  std::cout << "Install libfranka." << std::endl;
#endif
  return 0;
}
#endif