calibrate_kinect_checkerboard.cpp
/*
 * Copyright (c) 2011, Willow Garage, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 * * Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 * * Neither the name of the Willow Garage, Inc. nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

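// This node locates a checkerboard in the RGB image, asks the user to touch its
// four outer corners with the gripper tip, and estimates the rigid transform
// between the robot's fixed frame and the camera frame from those point pairs.
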
#include <ros/ros.h>
#include <image_transport/image_transport.h>

#include <cv_bridge/cv_bridge.h>
#include <image_geometry/pinhole_camera_model.h>

#include <tf/transform_listener.h>
#include <tf/transform_broadcaster.h>

#include <pcl/point_types.h>
#include <pcl_ros/point_cloud.h>
#include <pcl_ros/transforms.h>
#include <pcl/registration/registration.h>
#include <pcl/registration/transformation_estimation_svd.h>

#include <sensor_msgs/Image.h>
#include <sensor_msgs/CameraInfo.h>
#include <geometry_msgs/PointStamped.h>

// Header providing the pattern detector and the CHESSBOARD pattern type used below
// (this include was missing from the listing; the path is assumed).
#include <turtlebot_arm_kinect_calibration/detect_calibration_pattern.h>

using namespace std;
using namespace Eigen;

// Convert a 4x4 Eigen transform matrix into a tf::Transform.
tf::Transform tfFromEigen(Eigen::Matrix4f trans)
{
  tf::Matrix3x3 btm;
  btm.setValue(trans(0, 0), trans(0, 1), trans(0, 2),
               trans(1, 0), trans(1, 1), trans(1, 2),
               trans(2, 0), trans(2, 1), trans(2, 2));
  tf::Transform ret;
  ret.setOrigin(tf::Vector3(trans(0, 3), trans(1, 3), trans(2, 3)));
  ret.setBasis(btm);
  return ret;
}

// Convert a tf::Transform into a 4x4 Eigen transform matrix.
Eigen::Matrix4f EigenFromTF(tf::Transform trans)
{
  // Start from identity so the bottom row (0 0 0 1) is fully initialized.
  Eigen::Matrix4f out = Eigen::Matrix4f::Identity();
  tf::Quaternion quat = trans.getRotation();
  tf::Vector3 origin = trans.getOrigin();

  Eigen::Quaternionf quat_out(quat.w(), quat.x(), quat.y(), quat.z());
  Eigen::Vector3f origin_out(origin.x(), origin.y(), origin.z());

  out.topLeftCorner<3, 3>() = quat_out.toRotationMatrix();
  out.topRightCorner<3, 1>() = origin_out;

  return out;
}
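// Note: tfFromEigen() and EigenFromTF() are inverse conversions; round-tripping a
// rigid transform through both should reproduce it up to floating-point error.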

class CalibrateKinectCheckerboard
{
  // (Member declarations below were missing from the listing and are
  // reconstructed from their uses in this file.)

  // Nodes and publishers/subscribers
  ros::NodeHandle nh_;
  image_transport::ImageTransport it_;
  image_transport::Publisher pub_;
  ros::Publisher detector_pub_;
  ros::Publisher physical_pub_;

  // Image and camera info subscribers
  ros::Subscriber image_sub_;
  ros::Subscriber info_sub_;

  // Structures for interacting with ROS messages
  cv_bridge::CvImagePtr input_bridge_;
  cv_bridge::CvImagePtr output_bridge_;
  tf::TransformListener tf_listener_;
  tf::TransformBroadcaster tf_broadcaster_;
  image_geometry::PinholeCameraModel cam_model_;

  // Calibration objects
  PatternDetector pattern_detector_;  // type provided by the calibration-pattern header included above

  // The optimized transform
  Eigen::Transform<float, 3, Eigen::Affine> transform_;

  // Visualization for markers
  pcl::PointCloud<pcl::PointXYZ> detector_points_;
  pcl::PointCloud<pcl::PointXYZ> ideal_points_;
  pcl::PointCloud<pcl::PointXYZ> image_points_;
  pcl::PointCloud<pcl::PointXYZ> physical_points_;

  // Have we calibrated the camera yet?
  bool calibrated;

  // Parameters
  std::string fixed_frame;
  std::string camera_frame;
  std::string target_frame;
  std::string tip_frame;
  std::string touch_frame;

  // Checkerboard description (interior corner counts and square size in meters)
  int checkerboard_width;
  int checkerboard_height;
  double checkerboard_grid;

  // Gripper tip position
  geometry_msgs::PointStamped gripper_tip;

public:
  CalibrateKinectCheckerboard() :
    nh_("~"), it_(nh_), calibrated(false)
  {
    // Load parameters from the server.
    nh_.param<std::string>("fixed_frame", fixed_frame, "/base_link");
    nh_.param<std::string>("camera_frame", camera_frame, "/camera_link");
    nh_.param<std::string>("target_frame", target_frame, "/calibration_pattern");
    nh_.param<std::string>("tip_frame", tip_frame, "/gripper_link");

    nh_.param<int>("checkerboard_width", checkerboard_width, 6);
    nh_.param<int>("checkerboard_height", checkerboard_height, 7);
    nh_.param<double>("checkerboard_grid", checkerboard_grid, 0.027);

    // Set pattern detector sizes
    pattern_detector_.setPattern(cv::Size(checkerboard_width, checkerboard_height),
                                 checkerboard_grid, CHESSBOARD);

    transform_.translation().setZero();
    transform_.matrix().topLeftCorner<3, 3>() = Quaternionf().setIdentity().toRotationMatrix();

    // Create subscriptions
    info_sub_ = nh_.subscribe("/camera/rgb/camera_info", 1, &CalibrateKinectCheckerboard::infoCallback, this);

    // Also publishers
    pub_ = it_.advertise("calibration_pattern_out", 1);
    detector_pub_ = nh_.advertise<pcl::PointCloud<pcl::PointXYZ> >("detector_cloud", 1);
    physical_pub_ = nh_.advertise<pcl::PointCloud<pcl::PointXYZ> >("physical_points_cloud", 1);

    // Create ideal points
    ideal_points_.push_back(pcl::PointXYZ(0, 0, 0));
    ideal_points_.push_back(pcl::PointXYZ((checkerboard_width - 1) * checkerboard_grid, 0, 0));
    ideal_points_.push_back(pcl::PointXYZ(0, (checkerboard_height - 1) * checkerboard_grid, 0));
    ideal_points_.push_back(pcl::PointXYZ((checkerboard_width - 1) * checkerboard_grid,
                                          (checkerboard_height - 1) * checkerboard_grid, 0));

    // Create proper gripper tip point
    nh_.param<double>("gripper_tip_x", gripper_tip.point.x, 0.0);
    nh_.param<double>("gripper_tip_y", gripper_tip.point.y, 0.0);
    nh_.param<double>("gripper_tip_z", gripper_tip.point.z, 0.0);
    gripper_tip.header.frame_id = tip_frame;

    ROS_INFO("[calibrate] Initialized.");
  }

  void infoCallback(const sensor_msgs::CameraInfoConstPtr& info_msg)
  {
    if (calibrated)
      return;
    cam_model_.fromCameraInfo(info_msg);
    pattern_detector_.setCameraMatrices(cam_model_.intrinsicMatrix(), cam_model_.distortionCoeffs());

    calibrated = true;
    image_sub_ = nh_.subscribe("/camera/rgb/image_raw", 1, &CalibrateKinectCheckerboard::imageCallback, this);

    ROS_INFO("[calibrate] Got image info!");
  }

  void pointcloudCallback(const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr& msg)
  {
    // Extract an RGB image from the organized point cloud and reuse the image path.
    sensor_msgs::ImagePtr image_msg(new sensor_msgs::Image);
    sensor_msgs::PointCloud2 cloud;
    pcl::toROSMsg(*msg, cloud);
    pcl::toROSMsg(cloud, *image_msg);

    imageCallback(image_msg);
  }

  void imageCallback(const sensor_msgs::ImageConstPtr& image_msg)
  {
    try
    {
      input_bridge_ = cv_bridge::toCvCopy(image_msg, "mono8");
      output_bridge_ = cv_bridge::toCvCopy(image_msg, "bgr8");
    }
    catch (cv_bridge::Exception& ex)
    {
      ROS_ERROR("[calibrate] Failed to convert image:\n%s", ex.what());
      return;
    }

    Eigen::Vector3f translation;
    Eigen::Quaternionf orientation;

    if (!pattern_detector_.detectPattern(input_bridge_->image, translation, orientation, output_bridge_->image))
    {
      ROS_INFO("[calibrate] Couldn't detect checkerboard, make sure it's visible in the image.");
      return;
    }

    tf::Transform target_transform;
    tf::StampedTransform base_transform;
    try
    {
      ros::Time acquisition_time = image_msg->header.stamp;
      ros::Duration timeout(1.0 / 30.0);

      target_transform.setOrigin(tf::Vector3(translation.x(), translation.y(), translation.z()));
      target_transform.setRotation(tf::Quaternion(orientation.x(), orientation.y(), orientation.z(), orientation.w()));
      tf_broadcaster_.sendTransform(
          tf::StampedTransform(target_transform, image_msg->header.stamp, image_msg->header.frame_id, target_frame));
    }
    catch (tf::TransformException& ex)
    {
      ROS_WARN("[calibrate] TF exception:\n%s", ex.what());
      return;
    }
    publishCloud(ideal_points_, target_transform, image_msg->header.frame_id);

    overlayPoints(ideal_points_, target_transform, output_bridge_);

    // Publish calibration image
    pub_.publish(output_bridge_->toImageMsg());

    pcl_ros::transformPointCloud(ideal_points_, image_points_, target_transform);

    cout << "Got an image callback!" << endl;

    calibrate(image_msg->header.frame_id);

    // This node performs a single calibration and then exits.
    ros::shutdown();
  }

  void publishCloud(pcl::PointCloud<pcl::PointXYZ> detector_points, tf::Transform &transform,
                    const std::string frame_id)
  {
    // Display to rviz
    pcl::PointCloud<pcl::PointXYZ> transformed_detector_points;

    pcl_ros::transformPointCloud(detector_points, transformed_detector_points, transform);

    transformed_detector_points.header.frame_id = frame_id;
    detector_pub_.publish(transformed_detector_points);
  }

  void overlayPoints(pcl::PointCloud<pcl::PointXYZ> detector_points, tf::Transform &transform,
                     cv_bridge::CvImagePtr& image)
  {
    // Overlay calibration points on the image
    pcl::PointCloud<pcl::PointXYZ> transformed_detector_points;

    pcl_ros::transformPointCloud(detector_points, transformed_detector_points, transform);

    int font_face = cv::FONT_HERSHEY_SCRIPT_SIMPLEX;
    double font_scale = 1;
    int thickness = 2;
    int radius = 5;

    for (unsigned int i = 0; i < transformed_detector_points.size(); i++)
    {
      pcl::PointXYZ pt = transformed_detector_points[i];
      cv::Point3d pt_cv(pt.x, pt.y, pt.z);
      cv::Point2d uv;
      uv = cam_model_.project3dToPixel(pt_cv);

      cv::circle(image->image, uv, radius, CV_RGB(255, 0, 0), -1);
      cv::Size text_size;
      int baseline = 0;
      std::stringstream out;
      out << i + 1;

      text_size = cv::getTextSize(out.str(), font_face, font_scale, thickness, &baseline);

      cv::Point origin = cv::Point(uv.x - text_size.width / 2, uv.y - radius - baseline/* - thickness*/);
      cv::putText(image->image, out.str(), origin, font_face, font_scale, CV_RGB(255, 0, 0), thickness);
    }
  }

  bool calibrate(const std::string frame_id)
  {
    // Start with a fresh set of measured gripper-tip points.
    physical_points_.clear();
    physical_points_.header.frame_id = fixed_frame;
    cout << "Is the checkerboard correct? " << endl;
    cout << "Move edge of gripper to point 1 in image and press Enter. " << endl;
    cin.ignore();
    addPhysicalPoint();
    cout << "Move edge of gripper to point 2 in image and press Enter. " << endl;
    cin.ignore();
    addPhysicalPoint();
    cout << "Move edge of gripper to point 3 in image and press Enter. " << endl;
    cin.ignore();
    addPhysicalPoint();
    cout << "Move edge of gripper to point 4 in image and press Enter. " << endl;
    cin.ignore();
    addPhysicalPoint();

    Eigen::Matrix4f t;

    physical_pub_.publish(physical_points_);

    // Estimate the rigid transform aligning the measured gripper-tip points
    // (fixed frame) with the detected pattern corners (camera frame).
    pcl::registration::TransformationEstimationSVD<pcl::PointXYZ, pcl::PointXYZ> svd_estimator;
    svd_estimator.estimateRigidTransformation(physical_points_, image_points_, t);

    // Output
    tf::Transform transform = tfFromEigen(t), trans_full, camera_transform_unstamped;
    tf::StampedTransform camera_transform;

    cout << "Resulting transform (camera frame -> fixed frame): " << endl << t << endl << endl;

    try
    {
      tf_listener_.lookupTransform(frame_id, camera_frame, ros::Time(0), camera_transform);
    }
    catch (tf::TransformException& ex)
    {
      ROS_WARN("[calibrate] TF exception:\n%s", ex.what());
      return false;
    }

    camera_transform_unstamped = camera_transform;
    trans_full = camera_transform_unstamped.inverse() * transform;

    Eigen::Matrix4f t_full = EigenFromTF(trans_full);
    Eigen::Matrix4f t_full_inv = (Eigen::Transform<float, 3, Affine>(t_full).inverse()).matrix();

    cout << "Resulting transform (fixed frame -> camera frame): " << endl << t_full << endl << endl;
    printStaticTransform(t_full_inv, fixed_frame, camera_frame);

    return true;
  }

  void printStaticTransform(Eigen::Matrix4f& transform, const std::string frame1, const std::string frame2)
  {
    Eigen::Quaternionf quat(transform.topLeftCorner<3, 3>());
    Eigen::Vector3f translation(transform.topRightCorner<3, 1>());

    cout << "Static transform publisher (use for external kinect): " << endl;

    cout << "rosrun tf static_transform_publisher x y z qx qy qz qw frame_id child_frame_id period_in_ms" << endl;
    cout << "rosrun tf static_transform_publisher " << translation.x() << " " << translation.y() << " "
         << translation.z() << " " << quat.x() << " " << quat.y() << " " << quat.z() << " " << quat.w()
         << " " << frame1 << " " << frame2 << " 100" << endl << endl;

    tf::Transform temp_tf_trans = tfFromEigen(transform);

    double yaw, pitch, roll;

    std::string fixed_frame_urdf(fixed_frame);

    // If there's a leading '/' character, remove it, as xacro can't deal with
    // extra characters in the link name.
    if (fixed_frame_urdf.size() > 0 && fixed_frame_urdf[0] == '/')
      fixed_frame_urdf.erase(0, 1);

    temp_tf_trans.getBasis().getEulerYPR(yaw, pitch, roll);

    cout << "URDF output (use for kinect on robot): " << endl;

    cout << "<?xml version=\"1.0\"?>\n<robot>\n" << "\t<property name=\"turtlebot_calib_cam_x\" value=\""
         << translation.x() << "\" />\n" << "\t<property name=\"turtlebot_calib_cam_y\" value=\"" << translation.y()
         << "\" />\n" << "\t<property name=\"turtlebot_calib_cam_z\" value=\"" << translation.z() << "\" />\n"
         << "\t<property name=\"turtlebot_calib_cam_rr\" value=\"" << roll << "\" />\n"
         << "\t<property name=\"turtlebot_calib_cam_rp\" value=\"" << pitch << "\" />\n"
         << "\t<property name=\"turtlebot_calib_cam_ry\" value=\"" << yaw << "\" />\n"
         << "\t<property name=\"turtlebot_kinect_frame_name\" value=\"" << fixed_frame_urdf << "\" />\n" << "</robot>"
         << endl << endl;
  }

  // Record the current gripper tip position, expressed in the fixed frame.
  void addPhysicalPoint()
  {
    geometry_msgs::PointStamped pt_out;

    try
    {
      tf_listener_.transformPoint(fixed_frame, gripper_tip, pt_out);
    }
    catch (tf::TransformException& ex)
    {
      ROS_WARN("[calibrate] TF exception:\n%s", ex.what());
      return;
    }

    physical_points_.push_back(pcl::PointXYZ(pt_out.point.x, pt_out.point.y, pt_out.point.z));
  }

  // Copy the detector's ideal corner points into the detector_points_ cloud.
  // (The function name was missing from the listing; the name below is assumed.)
  void convertIdealPointstoPointcloud()
  {
    detector_points_.points.resize(pattern_detector_.ideal_points.size());
    for (unsigned int i = 0; i < pattern_detector_.ideal_points.size(); i++)
    {
      cv::Point3f pt = pattern_detector_.ideal_points[i];
      detector_points_[i].x = pt.x;
      detector_points_[i].y = pt.y;
      detector_points_[i].z = pt.z;
    }
  }

};

int main(int argc, char** argv)
{
  ros::init(argc, argv, "calibrate_kinect_arm");

  // Instantiate the calibration node (this line was missing from the listing;
  // the instance name is assumed).
  CalibrateKinectCheckerboard cal;

  ros::spin();
}
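
// Usage sketch (executable name assumed; remap /camera/rgb/image_raw and
// /camera/rgb/camera_info if your driver publishes elsewhere):
//   rosrun turtlebot_arm_kinect_calibration calibrate_kinect_checkerboard
// Checkerboard geometry and the gripper tip offset are read as private
// parameters in the constructor, e.g. _checkerboard_width:=6 _checkerboard_grid:=0.027.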


turtlebot_arm_kinect_calibration
Author(s): Michael Ferguson, Helen Oleynikova
autogenerated on Fri Feb 7 2020 03:56:29