blob_tracker.cpp
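// Blob tracker node: subscribes to /camera/image_raw, tracks 4 blobs arranged
// on a square of side "square_size" (default 0.12 m), computes the pose cMo of
// the square with respect to the camera, and publishes:
//   - blob_tracker/data   (visp_ros::BlobTracker): stamped pose, blob image
//     points, their normalized projections, and the current image
//   - blob_tracker/status (std_msgs::Int8): 0 = not tracking, 1 = tracking,
//     2 = tracking stopped by the user (right click)
// Camera parameters are read from the ROS parameter server (cam_px, cam_py,
// cam_u0, cam_v0 and optionally cam_kud, cam_kdu).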
#include <boost/thread/mutex.hpp>

#include <image_transport/image_transport.h>
#include <ros/ros.h>
#include <std_msgs/Int8.h>
#include <std_msgs/String.h>

#include <cv_bridge/cv_bridge.h>

#include <visp_bridge/3dpose.h>

#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/vision/vpPose.h>

#include <visp_ros/BlobTracker.h>

namespace
{
class BlobTracker
{
  enum STATE_T
  {
    START,
    INIT,
    TRACK,
    END,
    QUIT
  };

  ros::NodeHandle m_nh;
  image_transport::ImageTransport m_it;
  image_transport::Subscriber m_image_sub;
  ros::Publisher m_tracker_publisher;
  ros::Publisher m_status_publisher;
  unsigned int m_queue_size;
  STATE_T m_state;
  boost::mutex m_lock;
#ifdef VISP_HAVE_X11
  vpDisplayX *m_display;
#elif defined( VISP_HAVE_GDI )
  vpDisplayGDI *m_display;
#elif defined( VISP_HAVE_OPENCV )
  vpDisplayOpenCV *m_display;
#endif
  vpImage< unsigned char > m_I_grayscale;
  std::vector< vpDot2 > m_blob;
  unsigned int m_nblobs;
  double m_square_size;
  vpCameraParameters m_cam;
  std::vector< vpPoint > m_points_3d;
  std::vector< vpImagePoint > m_points_2d;
  vpHomogeneousMatrix m_cMo;
  unsigned int m_thickness;
  double m_cam_px;
  double m_cam_py;
  double m_cam_u0;
  double m_cam_v0;
  double m_cam_kud;
  double m_cam_kdu;

public:
  BlobTracker()
    : m_it( m_nh )
    , m_image_sub()
    , m_tracker_publisher()
    , m_status_publisher()
    , m_queue_size( 1 )
    , m_state( START )
    , m_lock()
    , m_display( NULL )
    , m_I_grayscale()
    , m_blob()
    , m_nblobs( 4 )
    , m_square_size( 0.12 )
    , m_cam()
    , m_points_3d()
    , m_points_2d()
    , m_cMo()
    , m_thickness( 2 )
    , m_cam_px( -1 )
    , m_cam_py( -1 )
    , m_cam_u0( -1 )
    , m_cam_v0( -1 )
    , m_cam_kud( -1 )
    , m_cam_kdu( -1 )
  {
    m_image_sub         = m_it.subscribe( "/camera/image_raw", m_queue_size, &BlobTracker::callback, this );
    m_tracker_publisher = m_nh.advertise< visp_ros::BlobTracker >( "blob_tracker/data", m_queue_size );
    m_status_publisher  = m_nh.advertise< std_msgs::Int8 >( "blob_tracker/status", m_queue_size );

    m_blob.resize( m_nblobs );
    m_points_3d.resize( m_nblobs );
    m_points_2d.resize( m_nblobs );

    // Load object size
    m_nh.getParam( "square_size", m_square_size );

    // Load camera parameters
    m_nh.getParam( "cam_px", m_cam_px );
    m_nh.getParam( "cam_py", m_cam_py );
    m_nh.getParam( "cam_u0", m_cam_u0 );
    m_nh.getParam( "cam_v0", m_cam_v0 );
    m_nh.getParam( "cam_kud", m_cam_kud );
    m_nh.getParam( "cam_kdu", m_cam_kdu );

    if ( m_cam_px < 0 || m_cam_py < 0 || m_cam_u0 < 0 || m_cam_v0 < 0 )
    {
      ROS_ERROR( "Camera parameters are not set" );
    }

    if ( m_cam_kud == -1 || m_cam_kdu == -1 )
    {
      m_cam.initPersProjWithoutDistortion( m_cam_px, m_cam_py, m_cam_u0, m_cam_v0 );
    }
    else
    {
      m_cam.initPersProjWithDistortion( m_cam_px, m_cam_py, m_cam_u0, m_cam_v0, m_cam_kud, m_cam_kdu );
    }

    // Construct 3D model
    //
    //    pt0           pt1
    //       x         x
    //
    //          --------------> x
    //          |
    //    pt3   |       pt2
    //       x  |      x
    //          |
    //          \/ y

    m_points_3d[0] = vpPoint( -m_square_size / 2., -m_square_size / 2., 0 ); // top/left
    m_points_3d[1] = vpPoint( m_square_size / 2., -m_square_size / 2., 0 );  // top/right
    m_points_3d[2] = vpPoint( m_square_size / 2., m_square_size / 2., 0 );   // bottom/right
    m_points_3d[3] = vpPoint( -m_square_size / 2., m_square_size / 2., 0 );  // bottom/left
  }

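  // Main loop: runs at 60 Hz, pumps ROS callbacks (the image processing itself
  // happens in callback()), and exits once the state machine reaches QUIT.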
  void spin()
  {
    ros::Rate loop_rate( 60 );
    bool quit = false;
    while ( m_nh.ok() && !quit )
    {
      {
        boost::mutex::scoped_lock lock( m_lock );
        switch ( m_state )
        {
        case START:
          break;

        case INIT:
          break;

        case TRACK:
          break;

        case END:
          break;

        case QUIT:
          quit = true;
          break;

        default:
          break;
        }
      }

      ros::spinOnce();
      loop_rate.sleep();
    }
    if ( m_display )
    {
      delete m_display;
      m_display = NULL;
    }
    std::cout << "Image processing stopped..." << std::endl;
  }

  void init_display()
  {
#ifdef VISP_HAVE_X11
    m_display = new vpDisplayX;
#elif defined( VISP_HAVE_GDI )
    m_display = new vpDisplayGDI;
#elif defined( VISP_HAVE_OPENCV )
    m_display = new vpDisplayOpenCV;
#endif
    if ( m_display )
    {
      std::cout << "Image size: " << m_I_grayscale.getWidth() << " x " << m_I_grayscale.getHeight() << std::endl;
      std::cout << "Camera parameters:\n" << m_cam << std::endl;
      m_display->init( m_I_grayscale, 80, m_I_grayscale.getHeight() + 20, "Image processing" );
    }
  }

  void display( const sensor_msgs::ImageConstPtr &msg )
  {
    try
    {
      cv::Mat cv_frame = cv_bridge::toCvShare( msg, "bgr8" )->image;
      vpImageConvert::convert( cv_frame, m_I_grayscale );

      if ( m_display == NULL )
      {
        init_display();
      }

      vpDisplay::display( m_I_grayscale );
      vpDisplay::displayText( m_I_grayscale, 20, 20, "Click to start initialisation", vpColor::red );
      vpDisplay::flush( m_I_grayscale );

      if ( vpDisplay::getClick( m_I_grayscale, false ) )
      {
        m_state = INIT;
      }
    }
    catch ( cv_bridge::Exception &e )
    {
      ROS_ERROR( "cv_bridge exception: %s", e.what() );
      return;
    }
  }

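  // Image callback implementing the state machine: START waits for a click,
  // INIT lets the user click the 4 blobs clockwise to initialise the tracker,
  // TRACK tracks the blobs, computes the pose and publishes the results, and
  // END tears down the display before switching to QUIT.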
  void callback( const sensor_msgs::ImageConstPtr &msg )
  {
    boost::mutex::scoped_lock lock( m_lock );

    switch ( m_state )
    {
    case START:
    {
      std_msgs::Int8 status_msg;
      status_msg.data = 0;
      m_status_publisher.publish( status_msg );

      display( msg );
      break;
    }

    case INIT:
    {
      std_msgs::Int8 status_msg;
      status_msg.data = 0;
      m_status_publisher.publish( status_msg );

      cv::Mat cv_frame = cv_bridge::toCvShare( msg, "bgr8" )->image;
      vpImageConvert::convert( cv_frame, m_I_grayscale );

      vpDisplay::display( m_I_grayscale );
      vpDisplay::displayText( m_I_grayscale, 20, 20, "Click in the 4 blobs clockwise to initialise the tracker",
                              vpColor::red );
      vpDisplay::flush( m_I_grayscale );
      try
      {
        for ( unsigned int i = 0; i < m_blob.size(); i++ )
        {
          m_blob[i].setGraphics( true );
          m_blob[i].setGraphicsThickness( m_thickness );
          m_blob[i].initTracking( m_I_grayscale );
          m_blob[i].track( m_I_grayscale );
          m_points_2d[i] = m_blob[i].getCog();

          std::stringstream ss;
          ss << i;
          vpDisplay::displayText( m_I_grayscale, m_blob[i].getCog() + vpImagePoint( -20, 20 ), ss.str(),
                                  vpColor::red );
          vpDisplay::flush( m_I_grayscale );
        }
        compute_pose( m_points_3d, m_points_2d, m_cam, true, m_cMo );

        m_state = TRACK;
        vpDisplay::flush( m_I_grayscale );
      }
      catch ( ... )
      {
        std::cout << "Tracking failed during initialisation" << std::endl;
        m_state = START;
      }

      break;
    }

    case TRACK:
    {
      cv::Mat cv_frame = cv_bridge::toCvShare( msg, "bgr8" )->image;
      vpImageConvert::convert( cv_frame, m_I_grayscale );

      vpDisplay::display( m_I_grayscale );
      vpDisplay::displayText( m_I_grayscale, 20, 20, "Tracking in progress", vpColor::red );
      vpDisplay::displayText( m_I_grayscale, 40, 20, "Right click to quit", vpColor::red );

      visp_ros::BlobTracker blob_tracker_msg;
      try
      {
        for ( unsigned int i = 0; i < m_blob.size(); i++ )
        {
          m_blob[i].track( m_I_grayscale );
          m_points_2d[i] = m_blob[i].getCog();
          std::stringstream ss;
          ss << i;
          vpDisplay::displayText( m_I_grayscale, m_blob[i].getCog() + vpImagePoint( -20, 20 ), ss.str(),
                                  vpColor::red );
        }
        compute_pose( m_points_3d, m_points_2d, m_cam, false, m_cMo );
        vpDisplay::displayFrame( m_I_grayscale, m_cMo, m_cam, m_square_size / 2., vpColor::none, m_thickness );

        // Publish tracker results
        ros::Time now = ros::Time::now();
        geometry_msgs::PoseStamped msg_pose;
        msg_pose.header.stamp    = now;
        msg_pose.header.frame_id = msg->header.frame_id;
        msg_pose.pose            = visp_bridge::toGeometryMsgsPose( m_cMo ); // convert
        blob_tracker_msg.pose_stamped = msg_pose;
        cv_bridge::CvImagePtr cv_ptr  = cv_bridge::toCvCopy( msg, "bgr8" ); // copy the current frame into the message
        cv_ptr->toImageMsg( blob_tracker_msg.image );

        for ( unsigned int i = 0; i < m_blob.size(); i++ )
        {
          visp_ros::ImagePoint ip;
          ip.i = m_blob[i].getCog().get_i();
          ip.j = m_blob[i].getCog().get_j();
          blob_tracker_msg.blob_cogs.push_back( ip );

          visp_ros::ProjectedPoint pp;
          vpPixelMeterConversion::convertPoint( m_cam, m_blob[i].getCog(), pp.x, pp.y );
          blob_tracker_msg.blob_proj.push_back( pp );
        }
        vpDisplay::flush( m_I_grayscale );

        std_msgs::Int8 status_msg;
        status_msg.data = 1;

        vpMouseButton::vpMouseButtonType button;
        if ( vpDisplay::getClick( m_I_grayscale, button, false ) )
        {
          if ( button == vpMouseButton::button3 )
          {
            m_state         = END;
            status_msg.data = 2; // To stop visual servo controller
          }
        }

        m_tracker_publisher.publish( blob_tracker_msg );
        m_status_publisher.publish( status_msg );
      }
      catch ( ... )
      {
        std::cout << "Tracking failed" << std::endl;
        m_state = START;

        std_msgs::Int8 status_msg;
        status_msg.data = 0;
        m_status_publisher.publish( status_msg );
      }

      break;
    }

    case END:
    {
      std_msgs::Int8 status_msg;
      status_msg.data = 0;
      m_status_publisher.publish( status_msg );

      if ( m_display )
      {
        delete m_display;
        m_display = NULL;
      }
      m_state = QUIT;

      break;
    }

    default:
      break;
    }
  }

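  // Estimate the pose cMo of the square from the 2D blob centers. On
  // initialisation both the DEMENTHON and LAGRANGE linear methods are tried
  // and the solution with the lowest residual is kept; the pose is then always
  // refined by non-linear virtual visual servoing (VIRTUAL_VS).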
  double compute_pose( std::vector< vpPoint > &points_3d, const std::vector< vpImagePoint > &points_2d,
                       const vpCameraParameters &cam, bool init, vpHomogeneousMatrix &cMo )
  {
    vpPose pose;
    double x = 0, y = 0;
    for ( unsigned int i = 0; i < points_3d.size(); i++ )
    {
      vpPixelMeterConversion::convertPoint( cam, points_2d[i], x, y );
      points_3d[i].set_x( x );
      points_3d[i].set_y( y );
      pose.addPoint( points_3d[i] );
    }

    if ( init == true )
    {
      vpHomogeneousMatrix cMo_dem;
      vpHomogeneousMatrix cMo_lag;
      double residual_dem = std::numeric_limits< double >::max();
      double residual_lag = std::numeric_limits< double >::max();

      try
      {
        pose.computePose( vpPose::DEMENTHON, cMo_dem );
        residual_dem = pose.computeResidual( cMo_dem );
      }
      catch ( ... )
      {
        residual_dem = std::numeric_limits< double >::max();
      }
      try
      {
        pose.computePose( vpPose::LAGRANGE, cMo_lag );
        residual_lag = pose.computeResidual( cMo_lag );
      }
      catch ( ... )
      {
        residual_lag = std::numeric_limits< double >::max();
      }
      if ( residual_dem < residual_lag )
        cMo = cMo_dem;
      else
        cMo = cMo_lag;
    }
    pose.computePose( vpPose::VIRTUAL_VS, cMo );
    double residual = pose.computeResidual( cMo );

    return residual;
  }
};
} // namespace

int
main( int argc, char **argv )
{
  ros::init( argc, argv, "visp_ros_blob_tracker_node" );
  BlobTracker blobTracker;
  blobTracker.spin();
  return 0;
}
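The node publishes its results on blob_tracker/data (visp_ros::BlobTracker) and blob_tracker/status (std_msgs::Int8). Below is a minimal sketch of a node that consumes both topics, for example to feed a visual servo controller. It is only an illustration: the node name blob_tracker_listener and the callback names are hypothetical, and it assumes the visp_ros message headers are available on the include path.

#include <ros/ros.h>
#include <std_msgs/Int8.h>
#include <visp_ros/BlobTracker.h>

// Called for each tracker result: pose of the square in the camera frame plus
// the tracked blob coordinates.
void trackerCallback( const visp_ros::BlobTracker::ConstPtr &msg )
{
  const geometry_msgs::Pose &pose = msg->pose_stamped.pose;
  ROS_INFO( "cMo translation: [%f %f %f], %lu blobs tracked", pose.position.x, pose.position.y, pose.position.z,
            (unsigned long)msg->blob_cogs.size() );
}

// Called for each status update: 0 = not tracking, 1 = tracking,
// 2 = user asked to stop (right click), which a controller can use to halt.
void statusCallback( const std_msgs::Int8::ConstPtr &msg )
{
  ROS_INFO( "Tracker status: %d", msg->data );
}

int main( int argc, char **argv )
{
  ros::init( argc, argv, "blob_tracker_listener" );
  ros::NodeHandle nh;
  ros::Subscriber data_sub   = nh.subscribe( "blob_tracker/data", 1, trackerCallback );
  ros::Subscriber status_sub = nh.subscribe( "blob_tracker/status", 1, statusCallback );
  ros::spin();
  return 0;
}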