image_processing.cpp
/****************************************************************************
 *
 * $Id: file.cpp 3496 2011-11-22 15:14:32Z fnovotny $
 *
 * This file is part of the ViSP software.
 * Copyright (C) 2005 - 2012 by INRIA. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * ("GPL") version 2 as published by the Free Software Foundation.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact INRIA about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://www.irisa.fr/lagadic/visp/visp.html for more information.
 *
 * This software was developed at:
 * INRIA Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 * http://www.irisa.fr/lagadic
 *
 * If you have questions regarding the use of this file, please contact
 * INRIA at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Contact visp@irisa.fr if any conditions of this licensing are
 * not clear to you.
 *
 * Description:
 *
 *
 * Authors:
 * Filip Novotny
 *
 *
 *****************************************************************************/

#include "image_processing.h"
#include "visp/vpTrackingException.h"
#include "visp_camera_calibration/CalibPointArray.h"
#include "visp_camera_calibration/CalibPoint.h"
#include "visp_camera_calibration/calibrate.h"
#include "visp/vpMouseButton.h"

#include "names.h"
#include <visp_bridge/image.h>
#include "sensor_msgs/SetCameraInfo.h"
#include "camera_calibration_parsers/parse.h"

#include "visp/vpDisplayX.h"
#include "visp/vpImagePoint.h"
#include "visp/vpPixelMeterConversion.h"
#include "visp/vpMeterPixelConversion.h"
#include "visp/vpPose.h"
#include "visp/vpHomogeneousMatrix.h"
#include "visp/vpDot.h"
#include "visp/vpDot2.h"
#include "visp/vpCalibration.h"
#include <boost/format.hpp>
#include <iostream>

namespace visp_camera_calibration
{
ImageProcessing::ImageProcessing() :
  spinner_(0),
  queue_size_(1000),
  pause_image_(false),
  img_(480, 640, 128),
  cam_(600, 600, 0, 0),
  is_initialized(false)
{
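  // ROS wiring of this node: subscribe to the raw image topic, publish the
  // 2D/3D point correspondences, offer the set_camera_info_bis service (used
  // to save the final calibration) and keep a client for the calibrate service.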
  //Setup ROS environment
  //prepare function objects, define publishers & subscribers
  raw_image_subscriber_callback_t raw_image_callback = boost::bind(&ImageProcessing::rawImageCallback, this, _1);

  set_camera_info_bis_service_callback_t set_camera_info_bis_callback = boost::bind(&ImageProcessing::setCameraInfoBisCallback, this, _1, _2);

  raw_image_subscriber_ = n_.subscribe(visp_camera_calibration::raw_image_topic, queue_size_,
                                       raw_image_callback);

  calibrate_service_ = n_.serviceClient<visp_camera_calibration::calibrate>(visp_camera_calibration::calibrate_service);

  point_correspondence_publisher_ = n_.advertise<visp_camera_calibration::CalibPointArray>(visp_camera_calibration::point_correspondence_topic, queue_size_);

  set_camera_info_bis_service_ = n_.advertiseService(visp_camera_calibration::set_camera_info_bis_service,
                                                     set_camera_info_bis_callback);

  // read 3D model from parameters
  XmlRpc::XmlRpcValue model_points_x_list;
  XmlRpc::XmlRpcValue model_points_y_list;
  XmlRpc::XmlRpcValue model_points_z_list;
  ros::param::get(visp_camera_calibration::model_points_x_param, model_points_x_list);
  ros::param::get(visp_camera_calibration::model_points_y_param, model_points_y_list);
  ros::param::get(visp_camera_calibration::model_points_z_param, model_points_z_list);

  ROS_ASSERT(model_points_x_list.getType() == XmlRpc::XmlRpcValue::TypeArray);
  ROS_ASSERT(model_points_x_list.size() == model_points_y_list.size() && model_points_x_list.size() == model_points_z_list.size());
  for(int i = 0; i < model_points_x_list.size(); i++){
    ROS_ASSERT(model_points_x_list[i].getType() == XmlRpc::XmlRpcValue::TypeDouble);
    ROS_ASSERT(model_points_y_list[i].getType() == XmlRpc::XmlRpcValue::TypeDouble);
    ROS_ASSERT(model_points_z_list[i].getType() == XmlRpc::XmlRpcValue::TypeDouble);
    double X = static_cast<double>(model_points_x_list[i]);
    double Y = static_cast<double>(model_points_y_list[i]);
    double Z = static_cast<double>(model_points_z_list[i]);
    vpPoint p;
    p.setWorldCoordinates(X, Y, Z);
    model_points_.push_back(p);
  }
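  // model_points_ holds every 3D point of the calibration pattern; the
  // selected_points_ read below are the small subset the user clicks on to
  // bootstrap the pose estimate.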

  //define selected model points
  XmlRpc::XmlRpcValue selected_points_x_list;
  XmlRpc::XmlRpcValue selected_points_y_list;
  XmlRpc::XmlRpcValue selected_points_z_list;

  ros::param::get(visp_camera_calibration::selected_points_x_param, selected_points_x_list);
  ros::param::get(visp_camera_calibration::selected_points_y_param, selected_points_y_list);
  ros::param::get(visp_camera_calibration::selected_points_z_param, selected_points_z_list);
  ROS_ASSERT(selected_points_x_list.size() == selected_points_y_list.size() && selected_points_x_list.size() == selected_points_z_list.size());

  for(int i = 0; i < selected_points_x_list.size(); i++){
    ROS_ASSERT(selected_points_x_list[i].getType() == XmlRpc::XmlRpcValue::TypeDouble);
    ROS_ASSERT(selected_points_y_list[i].getType() == XmlRpc::XmlRpcValue::TypeDouble);
    ROS_ASSERT(selected_points_z_list[i].getType() == XmlRpc::XmlRpcValue::TypeDouble);
    double X = static_cast<double>(selected_points_x_list[i]);
    double Y = static_cast<double>(selected_points_y_list[i]);
    double Z = static_cast<double>(selected_points_z_list[i]);
    vpPoint p;
    p.setWorldCoordinates(X, Y, Z);
    selected_points_.push_back(p);
  }
}

void ImageProcessing::init()
{
  if (!is_initialized) {
    //init graphical interface
    vpDisplay* disp = new vpDisplayX();
    disp->init(img_);
    disp->setTitle("Image processing initialisation interface");
    vpDisplay::flush(img_);
    vpDisplay::display(img_);
    vpDisplay::displayCharString(img_, img_.getHeight()/2 - 10, img_.getWidth()/4, "Waiting for the camera feed.", vpColor::red);
    vpDisplay::displayCharString(img_, img_.getHeight()/2 + 10, img_.getWidth()/4, "If you are using the example camera, you should click on its window", vpColor::red);

    vpDisplay::flush(img_);

    //init camera with a rough guess: the constructor's default focal length and
    //a principal point at the image centre; calibration refines these later
    double px = cam_.get_px();
    double py = cam_.get_py();
    double u0 = img_.getWidth()/2;
    double v0 = img_.getHeight()/2;
    cam_.initPersProjWithoutDistortion(px, py, u0, v0);

    is_initialized = true;
  }
}

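// Service handler: writes the camera parameters received in the request to the
// calibration file given by the calibration_path parameter.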
bool ImageProcessing::setCameraInfoBisCallback(sensor_msgs::SetCameraInfo::Request &req,
                                               sensor_msgs::SetCameraInfo::Response &res){
  std::string calibration_path;
  ros::param::getCached(visp_camera_calibration::calibration_path_param, calibration_path);
  ROS_INFO("saving calibration file to %s", calibration_path.c_str());
  camera_calibration_parsers::writeCalibration(calibration_path, visp_camera_calibration::raw_image_topic, req.camera_info);
  return true;
}

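// Callback for the raw_image topic. For every frame the user chooses to keep,
// the selected reference points are clicked and tracked, a pose cMo is computed
// from them, every model point is then projected and tracked in the image, and
// the resulting 2D/3D correspondences are published on the point correspondence topic.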
void ImageProcessing::rawImageCallback(const sensor_msgs::Image::ConstPtr& image){
  ros::Rate loop_rate(200);
  double gray_level_precision;
  double size_precision;
  bool pause_at_each_frame = false; //Wait for user input each time a new frame is received.

  ros::param::getCached(visp_camera_calibration::gray_level_precision_param, gray_level_precision);
  ros::param::getCached(visp_camera_calibration::size_precision_param, size_precision);
  ros::param::getCached(visp_camera_calibration::pause_at_each_frame_param, pause_at_each_frame);

  vpPose pose;
  vpCalibration calib;
  visp_camera_calibration::CalibPointArray calib_all_points;

  img_ = visp_bridge::toVispImage(*image);

  init();

  vpDisplay::display(img_);
  vpDisplay::flush(img_);

  if(!pause_at_each_frame){
    vpImagePoint ip;
    vpDisplay::displayRectangle(img_, 0, 0, img_.getWidth(), 15, vpColor::black, true);
    vpDisplay::displayCharString(img_, 10, 10, "Click on the window to select the current image", vpColor::red);
    vpDisplay::flush(img_);
    if(pause_image_){
      pause_image_ = false;
    }
    else{
      return;
    }
  }

  pose.clearPoint();
  calib.clearPoint();
  vpImagePoint ip;

  //lets the user select keypoints
  for(unsigned int i = 0; i < selected_points_.size(); i++){
    try{
      vpDot2 d;
      d.setGrayLevelPrecision(gray_level_precision);
      d.setSizePrecision(size_precision);

      ROS_INFO("Click on point %u", i+1);
      vpDisplay::displayRectangle(img_, 0, 0, img_.getWidth(), 15, vpColor::black, true);
      vpDisplay::displayCharString(img_, 10, 10, boost::str(boost::format("click on point %1%") % (i+1)).c_str(), vpColor::red);
      vpDisplay::flush(img_);
      while(ros::ok() && !vpDisplay::getClick(img_, ip, false));

      d.initTracking(img_, ip);

      ip.set_ij(d.getCog().get_i(), d.getCog().get_j());
      double x = 0, y = 0;
      vpPixelMeterConversion::convertPoint(cam_, ip, x, y);
      selected_points_[i].set_x(x);
      selected_points_[i].set_y(y);
      pose.addPoint(selected_points_[i]);
      calib.addPoint(selected_points_[i].get_oX(), selected_points_[i].get_oY(), selected_points_[i].get_oZ(), ip);

      vpDisplay::displayCross(img_, d.getCog(), 10, vpColor::red);
      vpDisplay::flush(img_);
    }catch(vpTrackingException &e){
      ROS_ERROR("Failed to init point");
    }
  }

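  // Pose estimation from the clicked points: Lagrange's linear method provides
  // an initial guess, which the virtual visual servoing step then refines.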
  vpHomogeneousMatrix cMo;
  pose.computePose(vpPose::LAGRANGE, cMo);
  pose.computePose(vpPose::VIRTUAL_VS, cMo);
  vpHomogeneousMatrix cMoTmp = cMo;

  vpCameraParameters camTmp = cam_;
  //compute local calibration to match the calibration grid with the image
  try{
    calib.computeCalibration(vpCalibration::CALIB_VIRTUAL_VS, cMoTmp, camTmp, false);
    ROS_DEBUG_STREAM("cMo=" << std::endl << cMoTmp << std::endl);
    ROS_DEBUG_STREAM("cam=" << std::endl << camTmp << std::endl);

    //project all points and track their corresponding image location
    for (std::vector<vpPoint>::iterator model_point_iter = model_points_.begin();
         model_point_iter != model_points_.end();
         model_point_iter++){
      //project each model point into image according to current calibration
      vpColVector _cP, _p;

      model_point_iter->changeFrame(cMoTmp, _cP);
      model_point_iter->projection(_cP, _p);
      vpMeterPixelConversion::convertPoint(camTmp, _p[0], _p[1], ip);
      if (10 < ip.get_u() && ip.get_u() < img_.getWidth() - 10 &&
          10 < ip.get_v() && ip.get_v() < img_.getHeight() - 10) {
        try {
          //track the projected point, look for match
          vpDot2 md;
          md.setGrayLevelPrecision(gray_level_precision);
          md.setSizePrecision(size_precision);

          md.initTracking(img_, ip);
          if(!ros::ok())
            return;

          vpRect bbox = md.getBBox();
          vpImagePoint cog = md.getCog();
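          // Reject the match if the dot touches the image border or if its
          // centre of gravity is more than 10 pixels away from the projection.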
          if(bbox.getLeft() < 5 || bbox.getRight() > (double)img_.getWidth() - 5 ||
             bbox.getTop() < 5 || bbox.getBottom() > (double)img_.getHeight() - 5 ||
             vpMath::abs(ip.get_u() - cog.get_u()) > 10 ||
             vpMath::abs(ip.get_v() - cog.get_v()) > 10){
            ROS_DEBUG("tracking failed[suspicious point location].");
          }else{
            //point matches
            double x = 0, y = 0;
            vpPixelMeterConversion::convertPoint(camTmp, cog, x, y);
            model_point_iter->set_x(x);
            model_point_iter->set_y(y);

            md.display(img_, vpColor::yellow, 2);
            visp_camera_calibration::CalibPoint cp;
            cp.i = cog.get_i();
            cp.j = cog.get_j();
            cp.X = model_point_iter->get_oX();
            cp.Y = model_point_iter->get_oY();
            cp.Z = model_point_iter->get_oZ();

            calib_all_points.points.push_back(cp);

            model_point_iter->display(img_, cMoTmp, camTmp);
            loop_rate.sleep(); //To avoid refresh problems
            vpDisplay::flush(img_);
          }
        } catch(...){
          ROS_DEBUG("tracking failed.");
        }
      } else {
        ROS_DEBUG("bad projection.");
      }
    }

    ROS_INFO("Left click on the interface window to continue, right click to restart");
    vpDisplay::displayRectangle(img_, 0, 0, img_.getWidth(), 15, vpColor::black, true);
    vpDisplay::displayCharString(img_, 10, 10, "Left click on the interface window to continue, right click to restart", vpColor::red);
    vpDisplay::flush(img_);

    vpMouseButton::vpMouseButtonType btn;
    while(ros::ok() && !vpDisplay::getClick(img_, ip, btn, false));
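    // Left click: publish the collected 2D/3D correspondences for this image.
    // Any other button: discard them and redo the selection on the same frame.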
    if(btn == vpMouseButton::button1)
      point_correspondence_publisher_.publish(calib_all_points);
    else{
      rawImageCallback(image);
      return;
    }
  }catch(...){
    ROS_ERROR("calibration failed.");
  }
}

void ImageProcessing::interface()
{
  vpImagePoint ip;
  while(ros::ok()){
    ros::spinOnce();
    if(vpDisplay::getClick(img_, ip, false))
      pause_image_ = true;
  }
  ros::waitForShutdown();
}

ImageProcessing::~ImageProcessing()
{
  // TODO Auto-generated destructor stub
}
} // namespace visp_camera_calibration
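
A minimal sketch of how this class is presumably driven from a node entry point; the package's actual main() lives in a separate file, and the node name used here (and the assumption that interface() is the blocking entry loop) are illustrative only:

#include <ros/ros.h>
#include "image_processing.h"

int main(int argc, char **argv)
{
  ros::init(argc, argv, "image_processing");            // node name is an assumption
  visp_camera_calibration::ImageProcessing processing;  // constructor sets up all topics and services
  processing.interface();                               // forward user clicks and spin ROS callbacks until shutdown
  return 0;
}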