Example that shows how to create a ROS node that controls the Pioneer mobile robot by IBVS visual servoing with respect to a blob. The current visual features are s = (x, log(Z/Z*)), where x is the abscissa of the blob center of gravity and Z its depth. The desired features are s* = (x*, 0), with x* = 0 and with the desired depth Z* set to the depth measured when the servo is initialized.
The degrees of freedom that are controlled are (vx, wz), where wz is the rotational velocity and vx the translational velocity of the mobile platform at point M located midway between the two wheels.
The feature x is used to control wy, while log(Z/Z*) is used to control vz, both expressed in the camera frame. The value of x is measured by a blob tracker. The value of Z is estimated from the blob area in the image, assuming the depth is inversely proportional to the square root of this area.
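With the EYEINHAND_L_cVe_eJe configuration used in the code below, the velocity computed by vpServo is of the form v = -lambda * (Ls * cVe * eJe)^+ * (s - s*), where Ls is the interaction matrix of the features, cVe the velocity twist transformation from the camera frame to the mobile platform frame, eJe the robot Jacobian and ^+ the pseudo-inverse; the resulting v = (vx, wz) is sent to the robot at each iteration of the servo loop.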
 
#include <iostream>
 
#include <visp/vpCameraParameters.h>
#include <visp/vpDisplayX.h>
#include <visp/vpDot2.h>
#include <visp/vpFeatureBuilder.h>
#include <visp/vpFeatureDepth.h>
#include <visp/vpFeaturePoint.h>
#include <visp/vpHomogeneousMatrix.h>
#include <visp/vpImage.h>
#include <visp/vpImageConvert.h>
#include <visp/vpServo.h>
#include <visp/vpVelocityTwistMatrix.h>

// Classes assumed to come from the visp_ros package, used to interface the Pioneer robot and the camera over ROS
#include <visp_ros/vpROSGrabber.h>
#include <visp_ros/vpROSRobotPioneer.h>

#if defined( VISP_HAVE_DC1394_2 ) && defined( VISP_HAVE_X11 )
#define TEST_COULD_BE_ACHIEVED
#endif
 
#ifdef TEST_COULD_BE_ACHIEVED
int main( int argc, char **argv )
{
  try
  {
    vpImage< unsigned char > I; // Gray level image container
    double depth  = 1.;         // Depth used to build the desired x* point feature
    double lambda = 0.6;        // Servo task gain
    double coef   = 1. / 6.77;  // Scale factor that converts the blob surface measure into a depth in meters
 
    // Pioneer robot interface (vpROSRobotPioneer is assumed to come from the visp_ros package)
    vpROSRobotPioneer robot;
    robot.init( argc, argv );

    // Wait a while to be sure that the low level thread that controls the robot is started
    vpTime::sleepMs( 3000 );
 
    std::cout << "Robot connected" << std::endl;
 
    
    // Camera parameters. A precise calibration is not needed for this experiment
    vpCameraParameters cam;

    // Grabber that receives the camera images over ROS (vpROSGrabber from the visp_ros package)
    vpROSGrabber g;
    g.open( argc, argv );
    g.acquire( I );

    // Use the camera parameters published by the camera driver when available,
    // otherwise fall back to default values
    if ( g.getCameraInfo( cam ) == false )
      cam.initPersProjWithoutDistortion( 600, 600, I.getWidth() / 2, I.getHeight() / 2 );
 
 
    
    // Viewer used to display the current frame
    vpDisplayX d( I, 10, 10, "Current frame" );
    vpDisplay::display( I );
    vpDisplay::flush( I );
 
    
    // Blob tracker. The tracking is initialized by a user click on the blob
    vpDot2 dot;
    dot.setComputeMoments( true );              // moments are needed to get the blob area m00
    dot.setEllipsoidShapePrecision( 0. );       // track a blob without any constraint on its shape
    dot.setGrayLevelPrecision( 0.9 );           // gray level precision used to binarize the blob
    dot.setEllipsoidBadPointsPercentage( 0.5 ); // tolerate 50% of inner and border points with a bad gray level
    dot.initTracking( I );
 
    vpDisplay::flush( I );
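
    // Visual servo task. The EYEINHAND_L_cVe_eJe configuration means that the task uses the cVe
    // velocity twist transformation and the robot Jacobian eJe, so that the velocities returned by
    // computeControlLaw() directly apply to the mobile base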
 
    vpServo task;
    task.setServo( vpServo::EYEINHAND_L_cVe_eJe );
    task.setInteractionMatrixType( vpServo::DESIRED, vpServo::PSEUDO_INVERSE );
    task.setLambda( lambda );
    vpVelocityTwistMatrix cVe;
    cVe = robot.get_cVe();
    task.set_cVe( cVe );
 
    std::cout << "cVe: \n" << cVe << std::endl;
 
    vpMatrix eJe;
    robot.get_eJe( eJe ); // Jacobian of the mobile platform expressed at point M between the wheels
    task.set_eJe( eJe );
    std::cout << "eJe: \n" << eJe << std::endl;
 
    
    // Current and desired visual features associated to the x coordinate of the blob center of gravity
    vpFeaturePoint s_x, s_xd;
 
    
    // Build the current x feature from the blob center of gravity
    vpFeatureBuilder::create( s_x, cam, dot );
 
    
    // Build the desired x* feature: x* = 0, i.e. the blob lies on the vertical line passing through the principal point
    s_xd.buildFrom( 0, 0, depth );
 
    
    // Add the x feature to the task
    task.addFeature( s_x, s_xd );
 
    
    // Current and desired log(Z/Z*) visual features
    vpFeatureDepth s_Z, s_Zd;

    // Quantity derived from the blob area (moment m00), used to estimate the depth
    double surface = 1. / sqrt( dot.m00 / ( cam.get_px() * cam.get_py() ) );
 
    double Z, Zd;
    // Initial depth of the blob in front of the camera
    Z = coef * surface;
    // Desired depth Z*, learned as the initial depth
    Zd = Z;

    std::cout << "Z " << Z << std::endl;
    s_Z.buildFrom( s_x.get_x(), s_x.get_y(), Z, 0 );   // current log(Z/Z*) is 0 at start, hence the last parameter
    s_Zd.buildFrom( s_x.get_x(), s_x.get_y(), Zd, 0 ); // desired log(Z/Z*) is always 0
 
    
    // Add the log(Z/Z*) feature to the task
    task.addFeature( s_Z, s_Zd );
 
    vpColVector v; // Computed velocities: v[0] = vx (m/s), v[1] = wz (rad/s)
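
    // Servo loop: at each iteration the blob is tracked, the visual features are updated,
    // the control law is computed and the resulting velocities are sent to the robot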
 
    while ( 1 )
    {
      // Acquire a new image and use it as the background of the viewer
      g.acquire( I );
      vpDisplay::display( I );

      // Track the blob and update the current x feature from its center of gravity
      dot.track( I );
      vpFeatureBuilder::create( s_x, cam, dot );
 
      
      // Update the log(Z/Z*) feature. Since the depth Z changes, the feature has to be rebuilt
      surface = 1. / sqrt( dot.m00 / ( cam.get_px() * cam.get_py() ) );
      Z       = coef * surface;
      s_Z.buildFrom( s_x.get_x(), s_x.get_y(), Z, log( Z / Zd ) );
 
      // Update the twist transformation and the robot Jacobian used by the task
      robot.get_cVe( cVe );
      task.set_cVe( cVe );

      robot.get_eJe( eJe );
      task.set_eJe( eJe );
 
      
      // Compute the control law. Velocities are expressed in the mobile robot reference frame
      v = task.computeControlLaw();

      std::cout << "Send velocity to the Pioneer: " << v[0] << " m/s " << vpMath::deg( v[1] ) << " deg/s" << std::endl;

      // Send the computed velocities to the robot
      robot.setVelocity( vpRobot::REFERENCE_FRAME, v );

      // Draw a vertical line at the desired x coordinate of the blob center of gravity
      vpDisplay::displayLine( I, 0, 320, 479, 320, vpColor::red );
      vpDisplay::flush( I );
 
      
      // A click in the viewer stops the servo loop
      if ( vpDisplay::getClick( I, false ) )
        break;
    }
 
    std::cout << "Ending robot thread..." << std::endl;
 
    
    // Display the task information and destroy it
    task.print();
    task.kill();
  }
  catch ( const vpException &e )
  {
    std::cout << "Caught an exception: " << e << std::endl;
    return 1;
  }
}
#else
int main()
{
  std::cout << "You don't have the right 3rd party libraries to run this example..." << std::endl;
}
#endif