#include <boost/thread/mutex.hpp>
// ...
#include <std_msgs/Int8.h>
#include <std_msgs/String.h>
// ...
#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/vision/vpPose.h>
// ...
#include <visp_ros/BlobTracker.h>

// ... (beginning of the BlobTracker class and of its member declarations elided)
  unsigned int m_queue_size;
#if defined( VISP_HAVE_X11 )
  vpDisplayX *m_display;
#elif defined( VISP_HAVE_GDI )
  vpDisplayGDI *m_display;
#elif defined( VISP_HAVE_OPENCV )
  vpDisplayOpenCV *m_display;
#endif
  vpImage< unsigned char > m_I_grayscale;
  std::vector< vpDot2 > m_blob;
  unsigned int m_nblobs;
  vpCameraParameters m_cam;
  std::vector< vpPoint > m_points_3d;
  std::vector< vpImagePoint > m_points_2d;
  vpHomogeneousMatrix m_cMo;
  unsigned int m_thickness;
  // Constructor (signature and most member initialisers elided)
  // ...
    , m_tracker_publisher()
    , m_status_publisher()
    // ...
    , m_square_size( 0.12 )
    // ...
  {
    m_image_sub         = m_it.subscribe( "/camera/image_raw", m_queue_size, &BlobTracker::callback, this );
    m_tracker_publisher = m_nh.advertise< visp_ros::BlobTracker >( "blob_tracker/data", m_queue_size );
    m_status_publisher  = m_nh.advertise< std_msgs::Int8 >( "blob_tracker/status", m_queue_size );
    m_blob.resize( m_nblobs );
    m_points_3d.resize( m_nblobs );
    m_points_2d.resize( m_nblobs );
    m_nh.getParam( "square_size", m_square_size );

    m_nh.getParam( "cam_px", m_cam_px );
    m_nh.getParam( "cam_py", m_cam_py );
    m_nh.getParam( "cam_u0", m_cam_u0 );
    m_nh.getParam( "cam_v0", m_cam_v0 );
    m_nh.getParam( "cam_kud", m_cam_kud );
    m_nh.getParam( "cam_kdu", m_cam_kdu );
    if ( m_cam_px < 0 || m_cam_py < 0 || m_cam_u0 < 0 || m_cam_v0 < 0 )
    {
      ROS_ERROR( "Camera parameters are not set" );
      // ...
    }
    if ( m_cam_kud == -1 || m_cam_kdu == -1 )
    {
      m_cam.initPersProjWithoutDistortion( m_cam_px, m_cam_py, m_cam_u0, m_cam_v0 );
    }
    else
    {
      m_cam.initPersProjWithDistortion( m_cam_px, m_cam_py, m_cam_u0, m_cam_v0, m_cam_kud, m_cam_kdu );
    }
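    // 3D model of the target: the four blob centers form a square of side m_square_size
    // (0.12 m by default, overridable via the square_size parameter), expressed in the
    // object frame with its origin at the square center and Z = 0 in the blob plane.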
    m_points_3d[0] = vpPoint( -m_square_size / 2., -m_square_size / 2., 0 );
    m_points_3d[1] = vpPoint( m_square_size / 2., -m_square_size / 2., 0 );
    m_points_3d[2] = vpPoint( m_square_size / 2., m_square_size / 2., 0 );
    m_points_3d[3] = vpPoint( -m_square_size / 2., m_square_size / 2., 0 );
  }

  // Main processing loop: runs until ROS shuts down or the user asks to quit.
  // ...
    while ( m_nh.ok() && !quit )
    {
      // ...
      boost::mutex::scoped_lock lock( m_lock );
      // ... (per-iteration processing elided)
    }
    std::cout << "Image processing stopped..." << std::endl;
  }

  // Display creation: picks the X11, GDI or OpenCV backend depending on how ViSP was built.
  // ...
#if defined( VISP_HAVE_X11 )
    m_display = new vpDisplayX;
#elif defined( VISP_HAVE_GDI )
    m_display = new vpDisplayGDI;
#elif defined( VISP_HAVE_OPENCV )
    m_display = new vpDisplayOpenCV;
#endif
    if ( m_display )
    {
      std::cout << "Image size: " << m_I_grayscale.getWidth() << " x " << m_I_grayscale.getHeight() << std::endl;
      std::cout << "Camera parameters:\n" << m_cam << std::endl;
      m_display->init( m_I_grayscale, 80, m_I_grayscale.getHeight() + 20, "Image processing" );
    }
  }
  void display( const sensor_msgs::ImageConstPtr &msg )
  {
    try
    {
      // ... (cv_bridge conversion of msg into cv_frame elided)
      vpImageConvert::convert( cv_frame, m_I_grayscale );
      if ( m_display == NULL )
      {
        // ... (create the display on the first received frame)
      }
      vpDisplay::display( m_I_grayscale );
      vpDisplay::displayText( m_I_grayscale, 20, 20, "Click to start initialisation", vpColor::red );
      vpDisplay::flush( m_I_grayscale );
      if ( vpDisplay::getClick( m_I_grayscale, false ) )
      {
        // ...
      }
    }
    catch ( const cv_bridge::Exception &e )
    {
      ROS_ERROR( "cv_bridge exception: %s", e.what() );
    }
  }
  void callback( const sensor_msgs::ImageConstPtr &msg )
  {
    boost::mutex::scoped_lock lock( m_lock );
    // ... (state handling elided)
    {
      std_msgs::Int8 status_msg;
      // ... (status value elided)
      m_status_publisher.publish( status_msg );
    }
    // ...
    {
      std_msgs::Int8 status_msg;
      // ...
      m_status_publisher.publish( status_msg );
    }
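      // Initialisation state: the user clicks inside the four blobs (clockwise). Each click
      // seeds vpDot2::initTracking(), the blob is tracked once to get its centre of gravity,
      // and an initial pose m_cMo is computed from the four 2D/3D correspondences.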
      vpImageConvert::convert( cv_frame, m_I_grayscale );
      vpDisplay::display( m_I_grayscale );
      vpDisplay::displayText( m_I_grayscale, 20, 20, "Click in the 4 blobs clockwise to initialise the tracker",
                              vpColor::red );
      vpDisplay::flush( m_I_grayscale );
      // ...
      for ( unsigned int i = 0; i < m_blob.size(); i++ )
      {
        m_blob[i].setGraphics( true );
        m_blob[i].setGraphicsThickness( m_thickness );
        m_blob[i].initTracking( m_I_grayscale ); // waits for a click in blob i
        m_blob[i].track( m_I_grayscale );
        m_points_2d[i] = m_blob[i].getCog();
        std::stringstream ss;
        // ... (blob label elided)
        vpDisplay::displayText( m_I_grayscale, m_blob[i].getCog() + vpImagePoint( -20, 20 ), ss.str(), vpColor::red );
        vpDisplay::flush( m_I_grayscale );
      }
      compute_pose( m_points_3d, m_points_2d, m_cam, true, m_cMo );
      // ...
      vpDisplay::flush( m_I_grayscale );
      // ... (if the initialisation does not succeed)
      std::cout << "Tracking failed during initialisation" << std::endl;
      // ... (tracking state)
      vpImageConvert::convert( cv_frame, m_I_grayscale );
      vpDisplay::display( m_I_grayscale );
      vpDisplay::displayText( m_I_grayscale, 20, 20, "Tracking in progress", vpColor::red );
      vpDisplay::displayText( m_I_grayscale, 40, 20, "Right click to quit", vpColor::red );
      visp_ros::BlobTracker blob_tracker_msg;
      // ...
      for ( unsigned int i = 0; i < m_blob.size(); i++ )
      {
        m_blob[i].track( m_I_grayscale );
        m_points_2d[i] = m_blob[i].getCog();
        std::stringstream ss;
        // ... (blob label elided)
        vpDisplay::displayText( m_I_grayscale, m_blob[i].getCog() + vpImagePoint( -20, 20 ), ss.str(), vpColor::red );
      }
      compute_pose( m_points_3d, m_points_2d, m_cam, false, m_cMo );
      vpDisplay::displayFrame( m_I_grayscale, m_cMo, m_cam, m_square_size / 2., vpColor::none, m_thickness );
      // ... ('now' and 'cv_ptr' are set earlier in the callback)
      geometry_msgs::PoseStamped msg_pose;
      msg_pose.header.stamp         = now;
      msg_pose.header.frame_id      = msg->header.frame_id;
      msg_pose.pose                 = visp_bridge::toGeometryMsgsPose( m_cMo );
      blob_tracker_msg.pose_stamped = msg_pose;
      cv_ptr->toImageMsg( blob_tracker_msg.image );
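      // For every blob the message carries both its centre of gravity in pixels
      // (blob_cogs, visp_ros/ImagePoint with i = row, j = column) and its normalised
      // image coordinates in meters (blob_proj, visp_ros/ProjectedPoint) obtained with
      // vpPixelMeterConversion::convertPoint().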
      for ( unsigned int i = 0; i < m_blob.size(); i++ )
      {
        visp_ros::ImagePoint ip;
        ip.i = m_blob[i].getCog().get_i();
        ip.j = m_blob[i].getCog().get_j();
        blob_tracker_msg.blob_cogs.push_back( ip );

        visp_ros::ProjectedPoint pp;
        vpPixelMeterConversion::convertPoint( m_cam, m_blob[i].getCog(), pp.x, pp.y );
        blob_tracker_msg.blob_proj.push_back( pp );
      }
      vpDisplay::flush( m_I_grayscale );
      std_msgs::Int8 status_msg;
      // ... (status value elided)
      vpMouseButton::vpMouseButtonType button;
      if ( vpDisplay::getClick( m_I_grayscale, button, false ) )
      {
        if ( button == vpMouseButton::button3 )
        {
          // ... (right click: request quit)
        }
      }
      // ...
      m_tracker_publisher.publish( blob_tracker_msg );
      m_status_publisher.publish( status_msg );
      // ... (tracking failure handling)
      std::cout << "Tracking failed" << std::endl;
      // ...
      {
        std_msgs::Int8 status_msg;
        // ...
        m_status_publisher.publish( status_msg );
      }
      // ...
      {
        std_msgs::Int8 status_msg;
        // ...
        m_status_publisher.publish( status_msg );
      }
      // ...
  }
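  // compute_pose() estimates the pose cMo of the blob square from the 2D/3D point
  // correspondences. The pixel coordinates are first converted to normalised coordinates
  // with the camera intrinsics; on the first call (init == true) two linear pose estimates
  // (Dementhon and Lagrange) are compared by residual to bootstrap cMo, and in every call
  // the result is refined by non-linear virtual visual servoing.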
  double compute_pose( std::vector< vpPoint > &points_3d, const std::vector< vpImagePoint > &points_2d,
                       const vpCameraParameters &cam, bool init, vpHomogeneousMatrix &cMo )
  {
    vpPose pose;
    double x = 0, y = 0;
    for ( unsigned int i = 0; i < points_3d.size(); i++ )
    {
      vpPixelMeterConversion::convertPoint( cam, points_2d[i], x, y );
      points_3d[i].set_x( x );
      points_3d[i].set_y( y );
      pose.addPoint( points_3d[i] );
    }
    if ( init == true )
    {
      vpHomogeneousMatrix cMo_dem;
      vpHomogeneousMatrix cMo_lag;
      double residual_dem = std::numeric_limits< double >::max();
      double residual_lag = std::numeric_limits< double >::max();
      try
      {
        pose.computePose( vpPose::DEMENTHON, cMo_dem );
        residual_dem = pose.computeResidual( cMo_dem );
      }
      catch ( ... )
      {
        residual_dem = std::numeric_limits< double >::max();
      }
      try
      {
        pose.computePose( vpPose::LAGRANGE, cMo_lag );
        residual_lag = pose.computeResidual( cMo_lag );
      }
      catch ( ... )
      {
        residual_lag = std::numeric_limits< double >::max();
      }
      if ( residual_dem < residual_lag )
        cMo = cMo_dem;
      else
        cMo = cMo_lag;
    }
    pose.computePose( vpPose::VIRTUAL_VS, cMo );
    double residual = pose.computeResidual( cMo );
    return residual;
  }
};

int main( int argc, char **argv )
{
  ros::init( argc, argv, "visp_ros_blob_tracker_node" );
  BlobTracker blobTracker;
  // ...
  return 0;
}