image_flip.cpp
/*
 * Copyright 2017 Fraunhofer Institute for Manufacturing Engineering and Automation (IPA)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


//##################
//#### includes ####

#include <cob_image_flip/image_flip.h>
#include <geometry_msgs/TransformStamped.h>

namespace cob_image_flip
{
ImageFlip::ImageFlip(ros::NodeHandle nh)
: node_handle_(nh), img_sub_counter_(0), pc_sub_counter_(0), disparity_sub_counter_(0), transform_listener_(nh), it_(0), last_rotation_angle_(0), last_rotation_factor_(0)
{
	// set parameters
	ROS_DEBUG_STREAM("\n--------------------------\nImage Flip Parameters:\n--------------------------");
	node_handle_.param("rotation_mode", rotation_mode_, 0);
	ROS_DEBUG_STREAM("rotation_mode = " << rotation_mode_);
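	// note: rotation_mode selects how the rotation angle is determined below:
	//   0 = fixed angle taken from the 'rotation_angle' parameter,
	//   1 = automatic, gravity-aligned rotation computed via tf against 'reference_frame',
	//   2 = presumably like mode 1 but with the angle rounded to 90 degree steps (see determineRotationAngle())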
	if (rotation_mode_ == 0)
	{
		// fixed rotation angle
		node_handle_.param("rotation_angle", rotation_angle_, 0.);
		ROS_DEBUG_STREAM("rotation_angle = " << rotation_angle_);
	}
	else if (rotation_mode_ == 1 || rotation_mode_ == 2)
	{
		// automatic rotation in gravity direction
		node_handle_.param("reference_frame", reference_frame_, std::string(""));
		ROS_DEBUG_STREAM("reference_frame = " << reference_frame_);
	}
	else
	{
		ROS_ERROR("Unsupported value for parameter 'rotation_mode'. Exiting cob_image_flip.");
		exit(1);
	}
	node_handle_.param("flip_color_image", flip_color_image_, false);
	ROS_DEBUG_STREAM("flip_color_image = " << flip_color_image_);
	node_handle_.param("flip_pointcloud", flip_pointcloud_, false);
	ROS_DEBUG_STREAM("flip_pointcloud = " << flip_pointcloud_);
	node_handle_.param("flip_disparity_image", flip_disparity_image_, false);
	ROS_DEBUG_STREAM("flip_disparity_image = " << flip_disparity_image_);
	node_handle_.param("display_warnings", display_warnings_, false);
	ROS_DEBUG_STREAM("display_warnings = " << display_warnings_);

	// color image flip
	if (flip_color_image_ == true)
	{
		it_ = new image_transport::ImageTransport(node_handle_);
		color_camera_image_sub_.registerCallback(boost::bind(&ImageFlip::imageCallback, this, _1));
		color_camera_image_pub_ = it_->advertise("colorimage_out", 1, boost::bind(&ImageFlip::imgConnectCB, this, _1), boost::bind(&ImageFlip::imgDisconnectCB, this, _1));
		color_camera_image_2d_transform_pub_ = node_handle_.advertise<cob_perception_msgs::Float64ArrayStamped>("colorimage_inplane_transform", 1, false);
	}
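	// note: all three output publishers (color image above, point cloud and disparity image below) are advertised
	// with connect/disconnect callbacks for lazy subscription: the corresponding input topic is only subscribed
	// while at least one subscriber is attached to the output topic, so no work is done otherwise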

	// point cloud flip
	if (flip_pointcloud_ == true)
	{
		point_cloud_pub_ = node_handle_.advertise<sensor_msgs::PointCloud2>("pointcloud_out", 1, boost::bind(&ImageFlip::pcConnectCB, this, _1), boost::bind(&ImageFlip::pcDisconnectCB, this, _1));
		point_cloud_2d_transform_pub_ = node_handle_.advertise<cob_perception_msgs::Float64ArrayStamped>("pointcloud_inplane_transform", 1, false);
	}

	if (flip_disparity_image_ == true)
	{
		disparity_image_pub_ = node_handle_.advertise<stereo_msgs::DisparityImage>("disparityimage_out", 1, boost::bind(&ImageFlip::disparityConnectCB, this, _1), boost::bind(&ImageFlip::disparityDisconnectCB, this, _1));
		disparity_image_2d_transform_pub_ = node_handle_.advertise<cob_perception_msgs::Float64ArrayStamped>("disparityimage_inplane_transform", 1, false);
	}

	ROS_DEBUG_STREAM("ImageFlip initialized.");
}

ImageFlip::~ImageFlip()
{
	if (it_ != 0)
		delete it_;
}


double ImageFlip::determineRotationAngle(const std::string& camera_frame_id, const ros::Time& time)
{
	double rotation_angle = 0.;
	if (rotation_mode_ == 0)
	{
		rotation_angle = rotation_angle_;
	}
	else if (rotation_mode_ == 1 || rotation_mode_ == 2)
	{
		// check camera link orientation and decide whether the image must be turned around
		try
		{
			// compute angle of camera x-axis against x-y world plane (i.e. in reference coordinates)
			tf::Stamped<tf::Vector3> x_axis_camera(tf::Vector3(1, 0, 0), time /*ros::Time(0)*/, camera_frame_id), x_axis_ref;
			tf::Stamped<tf::Vector3> y_axis_camera(tf::Vector3(0, 1, 0), x_axis_camera.stamp_, camera_frame_id), y_axis_ref;
			transform_listener_.waitForTransform(reference_frame_, camera_frame_id, x_axis_camera.stamp_, ros::Duration(0.2));
			transform_listener_.transformVector(reference_frame_, x_axis_camera, x_axis_ref);
			transform_listener_.transformVector(reference_frame_, y_axis_camera, y_axis_ref);
			// compute the rotation angle so that the x-axis of the rotated image has 0 z-coordinate in the reference coordinate system (i.e. the x-axis of the rotated image is then parallel to the ground)
			// the rotation is around the z-axis of the camera system
			// 1. Compute the intersection between
			//    E_1: x-y-plane in reference system coordinates: z=0
			//    E_2: x-y-plane of the camera system (but without translation, just the rotational part): [x,y,z] = [0,0,0] + r*[x_axis_ref.x,x_axis_ref.y,x_axis_ref.z] + s*[y_axis_ref.x,y_axis_ref.y,y_axis_ref.z]
			//    --> r = -s*y_axis_ref.z/x_axis_ref.z
			//    --> line equation: x = s*[y_axis_ref.x-x_axis_ref.x*y_axis_ref.z/x_axis_ref.z, y_axis_ref.y-x_axis_ref.y*y_axis_ref.z/x_axis_ref.z, 0]
			// 2. Compute the target camera x-axis, which is parallel to the ground. The vector for x is given by the line equation, the direction of the target y-axis decides about the sign (+ or -).
			// 3. Compute the rotation between the camera x-axis and the target camera x-axis (i.e. between x_axis_ref and x_axis_target)
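			// In short: x_axis_target is the direction of the intersection line of the two planes, and the in-plane
			// rotation between x_axis_ref and x_axis_target is recovered below from their cross product (sine, signed
			// by its alignment with the camera z-axis) and their dot product (cosine) via atan2. For example, for a
			// camera looking roughly parallel to the ground that is rolled by about 30 degrees around its optical
			// axis, rotation_angle has a magnitude of about 30 degrees, which is the in-plane correction applied to
			// the published images.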
			if (x_axis_ref.z() != 0)	// do not compute a rotation if the camera's x-axis is already correctly aligned
			{
				// 1. line intersection
				const double a = y_axis_ref.z()/x_axis_ref.z();
				tf::Vector3 x_axis_target(y_axis_ref.x()-x_axis_ref.x()*a, y_axis_ref.y()-x_axis_ref.y()*a, 0);	// this is the line of intersection
				x_axis_target.normalize();
				// 2. resolve direction ambiguity
				tf::Vector3 z_axis_target = x_axis_ref.cross(y_axis_ref);	// remark: z_axis_ref == z_axis_target
				tf::Vector3 y_axis_target = z_axis_target.cross(x_axis_target);
				y_axis_target.normalize();

				//std::cout << "\n\nx_axis_ref=" << x_axis_ref.x() << ", " << x_axis_ref.y() << ", " << x_axis_ref.z()
				//		<< "\ny_axis_ref=" << y_axis_ref.x() << ", " << y_axis_ref.y() << ", " << y_axis_ref.z()
				//		<< "\nx_axis_target=" << x_axis_target.x() << ", " << x_axis_target.y() << ", " << x_axis_target.z()
				//		<< "\ny_axis_target=" << y_axis_target.x() << ", " << y_axis_target.y() << ", " << y_axis_target.z() << "\n" << std::endl;

				// compute a factor that rotates the image in a way that the new y-axis of the rotated image points against the z-direction of the reference system (i.e. points downwards)
				int factor = (y_axis_target.z()<0. ? 1 : -1);
				if (factor != last_rotation_factor_ && fabs(y_axis_target.z()) < 0.01)	// this hysteresis stops continuous flipping near 0
					factor = last_rotation_factor_;
				last_rotation_factor_ = factor;
				x_axis_target *= factor;

				//std::cout << "x_axis_target factored=" << x_axis_target.x() << ", " << x_axis_target.y() << ", " << x_axis_target.z() << "\n" << std::endl;

				// 3. compute angle
				tf::Vector3 rot_axis_x = x_axis_ref.cross(x_axis_target);
				double rot_sin = ((rot_axis_x.dot(z_axis_target)) >= 0 ? 1 : -1) * rot_axis_x.length();	// the sign of sin() depends on the alignment of the cross-product rotation axis with the z-axis
				double rot_cos = x_axis_ref.dot(x_axis_target);
				rotation_angle = -180./CV_PI * atan2(rot_sin, rot_cos);

				//std::cout << "rot_sin=" << rot_sin << "\trot_cos=" << rot_cos << "\trotation_angle=" << rotation_angle << "\n=========================================\n" << std::endl;
			}
			if (rotation_mode_ == 2)	// only rotate in 90 degree steps
				rotation_angle = 90. * cvRound(rotation_angle*1./90.);
			last_rotation_angle_ = rotation_angle;

//			tf::StampedTransform transform;
//			transform_listener_.lookupTransform(reference_frame_, camera_frame_, ros::Time(0), transform);
//			tfScalar roll, pitch, yaw;
//			transform.getBasis().getRPY(roll, pitch, yaw, 1);
//			std::cout << "xyz: " << transform.getOrigin().getX() << " " << transform.getOrigin().getY() << " " << transform.getOrigin().getZ() << "\n";
//			std::cout << "abcw: " << transform.getRotation().getX() << " " << transform.getRotation().getY() << " " << transform.getRotation().getZ() << " " << transform.getRotation().getW() << "\n";
		}
		catch (tf2::TransformException& ex)
		{
			if (display_warnings_ == true)
				ROS_DEBUG("%s", ex.what());
			rotation_angle = last_rotation_angle_;
		}
	}
	else
	{
		if (display_warnings_)
			ROS_WARN("ImageFlip::determineRotationAngle: Unsupported rotation mode.");
	}

	return rotation_angle;
}


bool ImageFlip::convertImageMessageToMat(const sensor_msgs::Image::ConstPtr& image_msg, cv_bridge::CvImageConstPtr& image_ptr, cv::Mat& image)
{
	try
	{
		image_ptr = cv_bridge::toCvShare(image_msg, image_msg->encoding);
	}
	catch (cv_bridge::Exception& e)
	{
		ROS_ERROR("ImageFlip::convertImageMessageToMat: cv_bridge exception: %s", e.what());
		return false;
	}
	image = image_ptr->image;

	return true;
}


void ImageFlip::imageCallback(const sensor_msgs::ImageConstPtr& color_image_msg)
{
	// read image
	cv_bridge::CvImageConstPtr color_image_ptr;
	cv::Mat color_image;
	if (convertImageMessageToMat(color_image_msg, color_image_ptr, color_image) == false)
		return;
	cv::Mat color_image_turned;
	cv::Mat rot_mat = cv::Mat::zeros(2,3,CV_64FC1);
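	// note: rot_mat holds the 2x3 affine transform (on homogeneous pixel coordinates [u, v, 1]) that maps a pixel
	// of the original image to its location in the turned image; it is filled per rotation case below and its
	// inverse is published at the end of this callback for mapping turned coordinates back to the original ones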

	// determine rotation angle
	double rotation_angle = determineRotationAngle(color_image_msg->header.frame_id, color_image_msg->header.stamp);

	// rotate
	// fast hard coded rotations
	if (rotation_angle==0. || rotation_angle==360. || rotation_angle==-360.)
	{
		color_image_turned = color_image;
		rot_mat.at<double>(0,0) = 1.;
		rot_mat.at<double>(1,1) = 1.;
	}
	else if (rotation_angle==90. || rotation_angle==-270.)
	{
		// rotate images by 90 degrees
		color_image_turned.create(color_image.cols, color_image.rows, color_image.type());
		if (color_image.type() != CV_8UC3)
		{
			ROS_ERROR("ImageFlip::imageCallback: Error: The image format of the color image is not CV_8UC3.\n");
			return;
		}
		for (int v = 0; v < color_image_turned.rows; v++)
			for (int u = 0; u < color_image_turned.cols; u++)
				color_image_turned.at<cv::Vec3b>(v,u) = color_image.at<cv::Vec3b>(color_image.rows-1-u,v);
		rot_mat.at<double>(0,1) = -1.;
		rot_mat.at<double>(0,2) = color_image.rows;
		rot_mat.at<double>(1,0) = 1.;
	}
	else if (rotation_angle==270. || rotation_angle==-90.)
	{
		// rotate images by 270 degrees
		color_image_turned.create(color_image.cols, color_image.rows, color_image.type());
		if (color_image.type() != CV_8UC3)
		{
			ROS_ERROR("ImageFlip::imageCallback: Error: The image format of the color image is not CV_8UC3.\n");
			return;
		}
		for (int v = 0; v < color_image_turned.rows; v++)
			for (int u = 0; u < color_image_turned.cols; u++)
				color_image_turned.at<cv::Vec3b>(v,u) = color_image.at<cv::Vec3b>(u,color_image.cols-1-v);
		rot_mat.at<double>(0,1) = 1.;
		rot_mat.at<double>(1,0) = -1.;
		rot_mat.at<double>(1,2) = color_image.cols;
	}
	else if (rotation_angle==180 || rotation_angle==-180)
	{
		// rotate images by 180 degrees
		color_image_turned.create(color_image.rows, color_image.cols, color_image.type());
		if (color_image.type() != CV_8UC3)
		{
			ROS_ERROR("ImageFlip::imageCallback: Error: The image format of the color image is not CV_8UC3.\n");
			return;
		}
		for (int v = 0; v < color_image.rows; v++)
		{
			uchar* src = color_image.ptr(v);
			uchar* dst = color_image_turned.ptr(color_image.rows - v - 1) + 3 * (color_image.cols - 1);
			for (int u = 0; u < color_image.cols; u++)
			{
				for (int k = 0; k < 3; k++)
				{
					*dst = *src;
					src++;
					dst++;
				}
				dst -= 6;
			}
		}
		rot_mat.at<double>(0,0) = -1.;
		rot_mat.at<double>(0,2) = color_image.cols;
		rot_mat.at<double>(1,1) = -1.;
		rot_mat.at<double>(1,2) = color_image.rows;
	}
	else
	{
		// arbitrary rotation
		bool switch_aspect_ratio = !(fabs(sin(rotation_angle*CV_PI/180.)) < 0.707106781);
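		// note: 0.707106781 is sin(45 deg), i.e. the output image swaps width and height whenever the rotation
		// angle is closer to +/-90 degrees than to 0 or 180 degrees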
		if (switch_aspect_ratio==false)
			color_image_turned.create(color_image.rows, color_image.cols, color_image.type());	// automatically decide for landscape or portrait orientation of the resulting image
		else
			color_image_turned.create(color_image.cols, color_image.rows, color_image.type());
		if (color_image.type() != CV_8UC3)
		{
			ROS_ERROR("ImageFlip::imageCallback: Error: The image format of the color image is not CV_8UC3.\n");
			return;
		}

		cv::Point center = cv::Point(color_image.cols/2, color_image.rows/2);
		rot_mat = cv::getRotationMatrix2D(center, -rotation_angle, 1.0);
		if (switch_aspect_ratio==true)
		{
			rot_mat.at<double>(0,2) += 0.5*(color_image_turned.cols-color_image.cols);
			rot_mat.at<double>(1,2) += 0.5*(color_image_turned.rows-color_image.rows);
		}
		cv::warpAffine(color_image, color_image_turned, rot_mat, color_image_turned.size());
	}

	// publish turned image
	cv_bridge::CvImage cv_ptr;
	cv_ptr.image = color_image_turned;
	cv_ptr.encoding = color_image_msg->encoding;
	sensor_msgs::Image::Ptr color_image_turned_msg = cv_ptr.toImageMsg();
	color_image_turned_msg->header = color_image_msg->header;
	color_camera_image_pub_.publish(color_image_turned_msg);

	// publish rotation matrix for backward transform to non-turned coordinates using a homogeneous image coordinate representation
	// the original, non-turned coordinates are of interest if the camera calibration shall be used (the calibration does not apply to coordinates of the turned image)
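	// usage sketch (consumer side, assumption): with the published 3x3 matrix R_inv (row-major in rot33_inv_msg.data),
	// a pixel (u_t, v_t) of the turned image maps back to the original image as
	//   u_o = R_inv[0]*u_t + R_inv[1]*v_t + R_inv[2]
	//   v_o = R_inv[3]*u_t + R_inv[4]*v_t + R_inv[5]
	// i.e. [u_o, v_o, 1]^T = R_inv * [u_t, v_t, 1]^T, so the original camera calibration can then be applied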
	cv::Mat rot33 = cv::Mat::eye(3,3,CV_64FC1);
	for (int r=0; r<2; ++r)
		for (int c=0; c<3; ++c)
			rot33.at<double>(r,c) = rot_mat.at<double>(r,c);
	cv::Mat rot33_inv = rot33.inv();
	cob_perception_msgs::Float64ArrayStamped rot33_inv_msg;
	rot33_inv_msg.header = color_image_msg->header;
	rot33_inv_msg.data.resize(9);
	for (int r=0; r<3; ++r)
		for (int c=0; c<3; ++c)
			rot33_inv_msg.data[r*3+c] = rot33_inv.at<double>(r,c);
	color_camera_image_2d_transform_pub_.publish(rot33_inv_msg);
}

void ImageFlip::imgConnectCB(const image_transport::SingleSubscriberPublisher& pub)
{
	img_sub_counter_++;
	if (img_sub_counter_ == 1)
	{
		ROS_DEBUG("ImageFlip::imgConnectCB: Connecting image callback.");
		color_camera_image_sub_.subscribe(*it_, "colorimage_in", 1);
	}
}

void ImageFlip::imgDisconnectCB(const image_transport::SingleSubscriberPublisher& pub)
{
	img_sub_counter_--;
	if (img_sub_counter_ == 0)
	{
		ROS_DEBUG("ImageFlip::imgDisconnectCB: Disconnecting image callback.");
		color_camera_image_sub_.unsubscribe();
	}
}


void ImageFlip::pcCallback(const sensor_msgs::PointCloud2::ConstPtr& point_cloud_msg)
{
	// determine rotation angle
	double rotation_angle = determineRotationAngle(point_cloud_msg->header.frame_id, point_cloud_msg->header.stamp);
	cv::Mat rot_mat = cv::Mat::zeros(2,3,CV_64FC1);

	// prepare turned point cloud
	const int element_size = point_cloud_msg->point_step;	// length of a point in bytes
	const int row_size = point_cloud_msg->row_step;	// length of a row in bytes
	sensor_msgs::PointCloud2::Ptr point_cloud_out_msg = boost::shared_ptr<sensor_msgs::PointCloud2>(new sensor_msgs::PointCloud2);
	point_cloud_out_msg->fields = point_cloud_msg->fields;
	point_cloud_out_msg->header = point_cloud_msg->header;
	point_cloud_out_msg->height = point_cloud_msg->height;
	point_cloud_out_msg->width = point_cloud_msg->width;
	point_cloud_out_msg->point_step = point_cloud_msg->point_step;
	point_cloud_out_msg->row_step = point_cloud_msg->row_step;
	point_cloud_out_msg->is_bigendian = point_cloud_msg->is_bigendian;
	point_cloud_out_msg->is_dense = point_cloud_msg->is_dense;
	point_cloud_out_msg->data.resize(point_cloud_out_msg->height * point_cloud_out_msg->width * element_size);
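	// note: the re-indexing below treats the cloud as an organized height x width grid of fixed-size points
	// (point_step bytes per point, row_step bytes per row), i.e. it assumes an organized point cloud as produced
	// by depth cameras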

	// rotate
	// fast hard coded rotations
	if (rotation_angle==0. || rotation_angle==360. || rotation_angle==-360.)
	{
		memcpy(&(point_cloud_out_msg->data[0]), &(point_cloud_msg->data[0]), point_cloud_msg->height*point_cloud_msg->width*element_size);
		rot_mat.at<double>(0,0) = 1.;
		rot_mat.at<double>(1,1) = 1.;
	}
	else if (rotation_angle==90. || rotation_angle==-270.)
	{
		// rotate the cloud by 90 degrees
		point_cloud_out_msg->height = point_cloud_msg->width;
		point_cloud_out_msg->width = point_cloud_msg->height;
		const int row_size_out = point_cloud_msg->height*element_size;	// length of an output row in bytes
		point_cloud_out_msg->row_step = row_size_out;
		for (int v = 0; v < point_cloud_out_msg->height; v++)
			for (int u = 0; u < point_cloud_out_msg->width; u++)
				memcpy(&(point_cloud_out_msg->data[v*row_size_out+u*element_size]), &(point_cloud_msg->data[(point_cloud_msg->height-1-u)*row_size+v*element_size]), element_size);
		rot_mat.at<double>(0,1) = -1.;
		rot_mat.at<double>(0,2) = point_cloud_msg->height;
		rot_mat.at<double>(1,0) = 1.;
	}
	else if (rotation_angle==270. || rotation_angle==-90.)
	{
		// rotate the cloud by 270 degrees
		point_cloud_out_msg->height = point_cloud_msg->width;
		point_cloud_out_msg->width = point_cloud_msg->height;
		const int row_size_out = point_cloud_msg->height*element_size;	// length of an output row in bytes
		point_cloud_out_msg->row_step = row_size_out;
		for (int v = 0; v < point_cloud_out_msg->height; v++)
			for (int u = 0; u < point_cloud_out_msg->width; u++)
				memcpy(&(point_cloud_out_msg->data[v*row_size_out+u*element_size]), &(point_cloud_msg->data[u*row_size+(point_cloud_msg->width-1-v)*element_size]), element_size);
		rot_mat.at<double>(0,1) = 1.;
		rot_mat.at<double>(1,0) = -1.;
		rot_mat.at<double>(1,2) = point_cloud_msg->width;
	}
	else if (rotation_angle==180 || rotation_angle==-180)
	{
		// rotate the cloud by 180 degrees
		for (int v = 0; v < point_cloud_out_msg->height; v++)
			for (int u = 0; u < point_cloud_out_msg->width; u++)
				memcpy(&(point_cloud_out_msg->data[v*row_size+u*element_size]), &(point_cloud_msg->data[(point_cloud_msg->height-1-v)*row_size+(point_cloud_msg->width-1-u)*element_size]), element_size);
		rot_mat.at<double>(0,0) = -1.;
		rot_mat.at<double>(0,2) = point_cloud_msg->width;
		rot_mat.at<double>(1,1) = -1.;
		rot_mat.at<double>(1,2) = point_cloud_msg->height;
	}
	else
	{
		// arbitrary rotation
		int row_size_out = row_size;
		// automatically decide for landscape or portrait orientation of the resulting cloud
		bool switch_aspect_ratio = !(fabs(sin(rotation_angle*CV_PI/180.)) < 0.707106781);
		if (switch_aspect_ratio==true)
		{
			point_cloud_out_msg->height = point_cloud_msg->width;
			point_cloud_out_msg->width = point_cloud_msg->height;
			row_size_out = point_cloud_msg->height*element_size;	// length of an output row in bytes
			point_cloud_out_msg->row_step = row_size_out;
		}

		// compute transform
		cv::Point center = cv::Point(point_cloud_msg->width/2, point_cloud_msg->height/2);
		rot_mat = cv::getRotationMatrix2D(center, -rotation_angle, 1.0);
		if (switch_aspect_ratio==true)
		{
			rot_mat.at<double>(0,2) += 0.5*((double)point_cloud_out_msg->width - (double)point_cloud_msg->width);
			rot_mat.at<double>(1,2) += 0.5*((double)point_cloud_out_msg->height - (double)point_cloud_msg->height);
		}
		cv::Mat rot_mat_inv = rot_mat.clone();
		rot_mat_inv.at<double>(0,1) = rot_mat.at<double>(1,0);
		rot_mat_inv.at<double>(1,0) = rot_mat.at<double>(0,1);
		rot_mat_inv.at<double>(0,2) = -rot_mat.at<double>(0,2)*rot_mat.at<double>(0,0) - rot_mat.at<double>(1,2)*rot_mat.at<double>(1,0);
		rot_mat_inv.at<double>(1,2) = -rot_mat.at<double>(1,2)*rot_mat.at<double>(0,0) + rot_mat.at<double>(0,2)*rot_mat.at<double>(1,0);
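		// note: the inverse is formed directly; for a scale-1 rotation R plus translation t the inverse affine
		// transform is [R^T | -R^T t], so only the off-diagonal rotation entries are swapped and the translation
		// is re-projected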

		// zero element
		std::vector<uchar> zero_element(element_size, 0);
		for (size_t i=0; i<point_cloud_msg->fields.size(); ++i)
		{
			if (point_cloud_msg->fields[i].name.compare("x") == 0 || point_cloud_msg->fields[i].name.compare("y") == 0 || point_cloud_msg->fields[i].name.compare("z") == 0)
			{
				float* val = (float*)&(zero_element[point_cloud_msg->fields[i].offset]);
				*val = std::numeric_limits<float>::quiet_NaN();
			}
			if (point_cloud_msg->fields[i].name.compare("rgb") == 0)
			{
				float* val = (float*)&(zero_element[point_cloud_msg->fields[i].offset]);
				int vali = 0;
				*val = *(float*)(&vali);
			}
		}

		// warp (nearest neighbor mode, no interpolation)
		for (int v = 0; v < point_cloud_out_msg->height; v++)
		{
			for (int u = 0; u < point_cloud_out_msg->width; u++)
			{
				int src_u = rot_mat_inv.at<double>(0,0)*u + rot_mat_inv.at<double>(0,1)*v + rot_mat_inv.at<double>(0,2);
				int src_v = rot_mat_inv.at<double>(1,0)*u + rot_mat_inv.at<double>(1,1)*v + rot_mat_inv.at<double>(1,2);
				if (src_u < 0 || src_u > point_cloud_msg->width-1 || src_v < 0 || src_v > point_cloud_msg->height-1)
					memcpy(&(point_cloud_out_msg->data[v*row_size_out+u*element_size]), &(zero_element[0]), element_size);
				else
					memcpy(&(point_cloud_out_msg->data[v*row_size_out+u*element_size]), &(point_cloud_msg->data[src_v*row_size+src_u*element_size]), element_size);
			}
		}
	}

	// publish turned point cloud
	point_cloud_pub_.publish(point_cloud_out_msg);

	// publish rotation matrix for backward transform to non-turned coordinates using a homogeneous image coordinate representation
	// the original, non-turned coordinates are of interest if the camera calibration shall be used (the calibration does not apply to coordinates of the turned image)
	cv::Mat rot33 = cv::Mat::eye(3,3,CV_64FC1);
	for (int r=0; r<2; ++r)
		for (int c=0; c<3; ++c)
			rot33.at<double>(r,c) = rot_mat.at<double>(r,c);
	cv::Mat rot33_inv = rot33.inv();
	cob_perception_msgs::Float64ArrayStamped rot33_inv_msg;
	rot33_inv_msg.header = point_cloud_msg->header;
	rot33_inv_msg.data.resize(9);
	for (int r=0; r<3; ++r)
		for (int c=0; c<3; ++c)
			rot33_inv_msg.data[r*3+c] = rot33_inv.at<double>(r,c);
	point_cloud_2d_transform_pub_.publish(rot33_inv_msg);


//	// check camera link orientation and decide whether image must be turned around
//	bool turnAround = false;
//	tf::StampedTransform transform;
//	try
//	{
//		transform_listener_->lookupTransform("/base_link", "/head_cam3d_link", ros::Time(0), transform);
//		tfScalar roll, pitch, yaw;
//		transform.getBasis().getRPY(roll, pitch, yaw, 1);
//		if (roll > 0.0)
//			turnAround = true;
//		//std::cout << "xyz: " << transform.getOrigin().getX() << " " << transform.getOrigin().getY() << " " << transform.getOrigin().getZ() << "\n";
//		//std::cout << "abcw: " << transform.getRotation().getX() << " " << transform.getRotation().getY() << " " << transform.getRotation().getZ() << " " << transform.getRotation().getW() << "\n";
//		//std::cout << "rpy: " << roll << " " << pitch << " " << yaw << "\n";
//	} catch (tf::TransformException ex)
//	{
//		if (display_warnings_ == true)
//			ROS_WARN("%s",ex.what());
//	}
//
//	if (turnAround == false)
//	{
//		// image upright
//		//sensor_msgs::Image color_image_turned_msg = *color_image_msg;
//		//color_image_turned_msg.header.stamp = ros::Time::now();
//		//color_camera_image_pub_.publish(color_image_turned_msg);
//		//sensor_msgs::PointCloud2::ConstPtr point_cloud_turned_msg = point_cloud_msg;
//		point_cloud_pub_.publish(point_cloud_msg);
//	}
//	else
//	{
//		// image upside down
//		// point cloud
//		pcl::PointCloud<T> point_cloud_src;
//		pcl::fromROSMsg(*point_cloud_msg, point_cloud_src);
//
//		boost::shared_ptr<pcl::PointCloud<T> > point_cloud_turned(new pcl::PointCloud<T>);
//		//point_cloud_turned->header = point_cloud_msg->header;
//		point_cloud_turned->header = pcl_conversions::toPCL(point_cloud_msg->header);
//		point_cloud_turned->height = point_cloud_msg->height;
//		point_cloud_turned->width = point_cloud_msg->width;
//		//point_cloud_turned->sensor_orientation_ = point_cloud_msg->sensor_orientation_;
//		//point_cloud_turned->sensor_origin_ = point_cloud_msg->sensor_origin_;
//		point_cloud_turned->is_dense = true;	//point_cloud_msg->is_dense;
//		point_cloud_turned->resize(point_cloud_src.height*point_cloud_src.width);
//		for (int v = (int)point_cloud_src.height - 1; v >= 0; v--)
//		{
//			for (int u = (int)point_cloud_src.width - 1; u >= 0; u--)
//			{
//				(*point_cloud_turned)(point_cloud_src.width-1 - u, point_cloud_src.height-1 - v) = point_cloud_src(u, v);
//			}
//		}
//
//		// publish turned data
//		sensor_msgs::PointCloud2::Ptr point_cloud_turned_msg(new sensor_msgs::PointCloud2);
//		pcl::toROSMsg(*point_cloud_turned, *point_cloud_turned_msg);
//		//point_cloud_turned_msg->header.stamp = ros::Time::now();
//		point_cloud_pub_.publish(point_cloud_turned_msg);
//
////		cv::namedWindow("test");
////		cv::imshow("test", color_image_turned);
////		cv::waitKey(10);
//	}
//
//	if (display_timing_ == true)
//		ROS_INFO("%d ImageFlip: Time stamp of pointcloud message: %f. Delay: %f.", point_cloud_msg->header.seq, point_cloud_msg->header.stamp.toSec(), ros::Time::now().toSec()-point_cloud_msg->header.stamp.toSec());
}

void ImageFlip::pcConnectCB(const ros::SingleSubscriberPublisher& pub)
{
	pc_sub_counter_++;
	if (pc_sub_counter_ == 1)
	{
		ROS_DEBUG("ImageFlip::pcConnectCB: Connecting point cloud callback.");
		point_cloud_sub_ = node_handle_.subscribe<sensor_msgs::PointCloud2>("pointcloud_in", 1, &ImageFlip::pcCallback, this);
	}
}

void ImageFlip::pcDisconnectCB(const ros::SingleSubscriberPublisher& pub)
{
	pc_sub_counter_--;
	if (pc_sub_counter_ == 0)
	{
		ROS_DEBUG("ImageFlip::pcDisconnectCB: Disconnecting point cloud callback.");
		point_cloud_sub_.shutdown();
	}
}

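// no-op deleter: allows wrapping the sensor_msgs::Image embedded in a stereo_msgs::DisparityImage into a
// sensor_msgs::ImageConstPtr (see disparityCallback) without transferring ownership of the memory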
void void_delete_image_msg(const sensor_msgs::Image*)
{
	return;
}

void ImageFlip::disparityCallback(const stereo_msgs::DisparityImage::ConstPtr& disparity_image_msg)
{
	// read image
	cv_bridge::CvImageConstPtr disparity_image_ptr;
	cv::Mat disparity_image;
	sensor_msgs::ImageConstPtr disparity_image_constptr = boost::shared_ptr<const sensor_msgs::Image>(&disparity_image_msg->image, void_delete_image_msg);
	if (convertImageMessageToMat(disparity_image_constptr, disparity_image_ptr, disparity_image) == false)
		return;
	cv::Mat disparity_image_turned;
	cv::Mat rot_mat = cv::Mat::zeros(2,3,CV_64FC1);

	// determine rotation angle
	double rotation_angle = determineRotationAngle(disparity_image_msg->header.frame_id, disparity_image_msg->header.stamp);

	// rotate
	// fast hard coded rotations
	if (rotation_angle==0. || rotation_angle==360. || rotation_angle==-360.)
	{
		disparity_image_turned = disparity_image;
		rot_mat.at<double>(0,0) = 1.;
		rot_mat.at<double>(1,1) = 1.;
	}
	else if (rotation_angle==90. || rotation_angle==-270.)
	{
		// rotate images by 90 degrees
		disparity_image_turned.create(disparity_image.cols, disparity_image.rows, disparity_image.type());
		if (disparity_image.type() != CV_32FC1)
		{
			ROS_ERROR("ImageFlip::disparityCallback: Error: The image format of the disparity image is not CV_32FC1.\n");
			return;
		}
		for (int v = 0; v < disparity_image_turned.rows; v++)
			for (int u = 0; u < disparity_image_turned.cols; u++)
				disparity_image_turned.at<float>(v,u) = disparity_image.at<float>(disparity_image.rows-1-u,v);
		rot_mat.at<double>(0,1) = -1.;
		rot_mat.at<double>(0,2) = disparity_image.rows;
		rot_mat.at<double>(1,0) = 1.;
	}
	else if (rotation_angle==270. || rotation_angle==-90.)
	{
		// rotate images by 270 degrees
		disparity_image_turned.create(disparity_image.cols, disparity_image.rows, disparity_image.type());
		if (disparity_image.type() != CV_32FC1)
		{
			ROS_ERROR("ImageFlip::disparityCallback: Error: The image format of the disparity image is not CV_32FC1.\n");
			return;
		}
		for (int v = 0; v < disparity_image_turned.rows; v++)
			for (int u = 0; u < disparity_image_turned.cols; u++)
				disparity_image_turned.at<float>(v,u) = disparity_image.at<float>(u,disparity_image.cols-1-v);
		rot_mat.at<double>(0,1) = 1.;
		rot_mat.at<double>(1,0) = -1.;
		rot_mat.at<double>(1,2) = disparity_image.cols;
	}
	else if (rotation_angle==180 || rotation_angle==-180)
	{
		// rotate images by 180 degrees
		disparity_image_turned.create(disparity_image.rows, disparity_image.cols, disparity_image.type());
		if (disparity_image.type() != CV_32FC1)
		{
			ROS_ERROR("ImageFlip::disparityCallback: Error: The image format of the disparity image is not CV_32FC1.\n");
			return;
		}
		for (int v = 0; v < disparity_image.rows; v++)
		{
			float* src = (float*)disparity_image.ptr(v);
			float* dst = (float*)disparity_image_turned.ptr(disparity_image.rows - v - 1) + (disparity_image.cols - 1);
			for (int u = 0; u < disparity_image.cols; u++)
			{
				*dst = *src;
				src++;
				dst--;
			}
		}
		rot_mat.at<double>(0,0) = -1.;
		rot_mat.at<double>(0,2) = disparity_image.cols;
		rot_mat.at<double>(1,1) = -1.;
		rot_mat.at<double>(1,2) = disparity_image.rows;
	}
	else
	{
		// arbitrary rotation
		bool switch_aspect_ratio = !(fabs(sin(rotation_angle*CV_PI/180.)) < 0.707106781);
		if (switch_aspect_ratio==false)
			disparity_image_turned.create(disparity_image.rows, disparity_image.cols, disparity_image.type());	// automatically decide for landscape or portrait orientation of the resulting image
		else
			disparity_image_turned.create(disparity_image.cols, disparity_image.rows, disparity_image.type());
		if (disparity_image.type() != CV_32FC1)
		{
			ROS_ERROR("ImageFlip::disparityCallback: Error: The image format of the disparity image is not CV_32FC1.\n");
			return;
		}

		cv::Point center = cv::Point(disparity_image.cols/2, disparity_image.rows/2);
		rot_mat = cv::getRotationMatrix2D(center, -rotation_angle, 1.0);
		if (switch_aspect_ratio==true)
		{
			rot_mat.at<double>(0,2) += 0.5*(disparity_image_turned.cols-disparity_image.cols);
			rot_mat.at<double>(1,2) += 0.5*(disparity_image_turned.rows-disparity_image.rows);
		}
		cv::warpAffine(disparity_image, disparity_image_turned, rot_mat, disparity_image_turned.size());
	}

	// publish turned image
	cv_bridge::CvImage cv_ptr;
	cv_ptr.image = disparity_image_turned;
	cv_ptr.encoding = disparity_image_msg->image.encoding;
	stereo_msgs::DisparityImage::Ptr disparity_image_turned_msg(new stereo_msgs::DisparityImage);
	sensor_msgs::ImagePtr disparity_image_turned_msg_image = cv_ptr.toImageMsg();
	disparity_image_turned_msg_image->header = disparity_image_msg->image.header;
	disparity_image_turned_msg->image = *disparity_image_turned_msg_image;
	disparity_image_turned_msg->header = disparity_image_msg->header;
	disparity_image_pub_.publish(disparity_image_turned_msg);

	// publish rotation matrix for backward transform to non-turned coordinates using a homogeneous image coordinate representation
	// the original, non-turned coordinates are of interest if the camera calibration shall be used (the calibration does not apply to coordinates of the turned image)
	cv::Mat rot33 = cv::Mat::eye(3,3,CV_64FC1);
	for (int r=0; r<2; ++r)
		for (int c=0; c<3; ++c)
			rot33.at<double>(r,c) = rot_mat.at<double>(r,c);
	cv::Mat rot33_inv = rot33.inv();
	cob_perception_msgs::Float64ArrayStamped rot33_inv_msg;
	rot33_inv_msg.header = disparity_image_msg->header;
	rot33_inv_msg.data.resize(9);
	for (int r=0; r<3; ++r)
		for (int c=0; c<3; ++c)
			rot33_inv_msg.data[r*3+c] = rot33_inv.at<double>(r,c);
	disparity_image_2d_transform_pub_.publish(rot33_inv_msg);
}


void ImageFlip::disparityConnectCB(const ros::SingleSubscriberPublisher& pub)
{
	disparity_sub_counter_++;
	if (disparity_sub_counter_ == 1)
	{
		ROS_DEBUG("ImageFlip::disparityConnectCB: Connecting disparity callback.");
		disparity_image_sub_ = node_handle_.subscribe<stereo_msgs::DisparityImage>("disparityimage_in", 1, &ImageFlip::disparityCallback, this);
	}
}

void ImageFlip::disparityDisconnectCB(const ros::SingleSubscriberPublisher& pub)
{
	disparity_sub_counter_--;
	if (disparity_sub_counter_ == 0)
	{
		ROS_DEBUG("ImageFlip::disparityDisconnectCB: Disconnecting disparity callback.");
		disparity_image_sub_.shutdown();
	}
}


} // namespace cob_image_flip
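
// Parameter and topic summary (sketch, derived from the param() and advertise()/subscribe() calls above):
//   rotation_mode:   0 = fixed angle, 1 = automatic via tf, 2 = presumably automatic rounded to 90 degree steps
//   rotation_angle:  fixed rotation in degrees, used with rotation_mode 0
//   reference_frame: tf frame whose x-y plane defines "upright", used with rotation_mode 1/2
//   flip_color_image, flip_pointcloud, flip_disparity_image: enable the respective input/output topic pair
//   display_warnings: print diagnostics when tf lookups fail or the rotation mode is unsupported
// Topics: colorimage_in -> colorimage_out (+ colorimage_inplane_transform),
//         pointcloud_in -> pointcloud_out (+ pointcloud_inplane_transform),
//         disparityimage_in -> disparityimage_out (+ disparityimage_inplane_transform)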