object_detection_visualizer_node.cpp
/*
 * Copyright 2017 Fraunhofer Institute for Manufacturing Engineering and Automation (IPA)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


#include <ros/ros.h>

// topics
// message_filters headers (only needed by the commented-out synchronized-callback variant below)
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
//#include <image_transport/image_transport.h>
//#include <image_transport/subscriber_filter.h>

#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/CameraInfo.h>

#include <visualization_msgs/Marker.h>
#include <visualization_msgs/MarkerArray.h>

// opencv
#include <opencv2/opencv.hpp>

#include <cv_bridge/cv_bridge.h>

// PCL
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl_ros/point_cloud.h>

// Eigen conversion of geometry_msgs poses (tf::poseMsgToEigen) and the boost mutex used below
#include <eigen_conversions/eigen_msg.h>
#include <boost/thread/mutex.hpp>

#include <cob_object_detection_msgs/DetectionArray.h>

#include <map>
#include <vector>
#include <string>
#include <sstream>
#include <iomanip>

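// Visualizes cob_object_detection_msgs::DetectionArray messages in two optional ways:
// as an RViz MarkerArray (three axis arrows plus a semi-transparent bounding-box cube
// per detection) and as an OpenCV window / PNG file in which the 3-D bounding boxes
// are projected into the color image reconstructed from the incoming point cloud.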
class ObjectDetectionVisualizer
{
public:
    // constructor signature and member initialization reconstructed from usage; the default values are assumptions
    ObjectDetectionVisualizer(ros::NodeHandle& nh)
        : node_handle_(nh), prev_marker_array_size_(0), projection_matrix_received_(false), image_counter_(0)
    {
        // parameters
        ros::NodeHandle pnh("~");
        std::cout << "\n========== ObjectDetectionVisualizer Parameters ==========\n";
        pnh.param("display_rviz_markers", display_rviz_markers_, true);
        std::cout << "display_rviz_markers: " << display_rviz_markers_ << std::endl;
        pnh.param("display_detection_image", display_detection_image_, true);
        std::cout << "display_detection_image: " << display_detection_image_ << std::endl;

        projection_matrix_ = cv::Mat::eye(3, 3, CV_64FC1);

        // Rviz visualization
        if (display_rviz_markers_ == true)
        {
            // subscriber registration reconstructed from the defined callbacks; topic names assumed
            detection_array_sub_ = node_handle_.subscribe("detection_array_topic", 1, &ObjectDetectionVisualizer::objectDetectionArrayCallback, this);
            marker_array_publisher_ = node_handle_.advertise<visualization_msgs::MarkerArray>("object_detection_marker_array", 0);
        }

        // detection image visualization
        if (display_detection_image_ == true)
        {
//            it_ = new image_transport::ImageTransport(node_handle_);
//            color_image_sub_.subscribe(*it_, "color_image", 1);
            pointcloud_sub_ = node_handle_.subscribe("pointcloud", 1, &ObjectDetectionVisualizer::pointcloudCallback, this);                    // topic name assumed
            pointcloud_info_sub_ = node_handle_.subscribe("pointcloud_info", 1, &ObjectDetectionVisualizer::pointcloudInfoCallback, this);      // topic name assumed

//            sync_detection_array_sub_.subscribe(node_handle_, "detection_array_topic", 1);
//            sync_input_ = new message_filters::Synchronizer<message_filters::sync_policies::ApproximateTime<cob_object_detection_msgs::DetectionArray, sensor_msgs::PointCloud2> >(2);
//            sync_input_->connectInput(sync_detection_array_sub_, pointcloud_sub_);
//            sync_input_->registerCallback(boost::bind(&ObjectDetectionVisualizer::detectionImageCallback, this, _1, _2));

            detection_display_sub_ = node_handle_.subscribe("detection_array_topic", 1, &ObjectDetectionVisualizer::objectDetectionDisplayCallback, this);
        }
    }

    ~ObjectDetectionVisualizer()
    {
//        if (it_ != NULL)
//            delete it_;
//        if (sync_input_ != NULL)
//            delete sync_input_;
    }

private:

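    /// Builds one marker group per detection: markers 0-2 are the x/y/z axis arrows of the
    /// object pose (colored red/green/blue), marker 3 is a semi-transparent cube sized by
    /// bounding_box_lwh. Leftover markers from a previously larger detection set are sent
    /// once with action DELETE.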
    void objectDetectionArrayCallback(const cob_object_detection_msgs::DetectionArray::ConstPtr& object_detection_msg)
    {
        // 3 arrows for each coordinate system of each detected fiducial, plus 1 cube for its bounding box
        const unsigned int detection_array_size = object_detection_msg->detections.size();
        const unsigned int markers_per_detection = 3+1;
        const unsigned int marker_array_size = markers_per_detection * detection_array_size;
        if (marker_array_size >= prev_marker_array_size_)
        {
            marker_array_msg_.markers.resize(marker_array_size);
        }

        // publish a coordinate system from arrow markers for each object
        for (unsigned int i = 0; i < detection_array_size; ++i)
        {
            for (unsigned int j = 0; j < 3; ++j)
            {
                unsigned int idx = markers_per_detection * i + j;
                marker_array_msg_.markers[idx].header = object_detection_msg->header;
                marker_array_msg_.markers[idx].ns = "object_detection";
                marker_array_msg_.markers[idx].id = idx;
                marker_array_msg_.markers[idx].type = visualization_msgs::Marker::ARROW;
                marker_array_msg_.markers[idx].action = visualization_msgs::Marker::ADD;
                marker_array_msg_.markers[idx].color.a = 0.85;
                marker_array_msg_.markers[idx].color.r = 0;
                marker_array_msg_.markers[idx].color.g = 0;
                marker_array_msg_.markers[idx].color.b = 0;

                marker_array_msg_.markers[idx].points.resize(2);
                marker_array_msg_.markers[idx].points[0].x = 0.0;
                marker_array_msg_.markers[idx].points[0].y = 0.0;
                marker_array_msg_.markers[idx].points[0].z = 0.0;
                marker_array_msg_.markers[idx].points[1].x = 0.0;
                marker_array_msg_.markers[idx].points[1].y = 0.0;
                marker_array_msg_.markers[idx].points[1].z = 0.0;
                if (j == 0)
                {
                    // x-axis arrow in red
                    marker_array_msg_.markers[idx].points[1].x = 0.2;
                    marker_array_msg_.markers[idx].color.r = 255;
                }
                else if (j == 1)
                {
                    // y-axis arrow in green
                    marker_array_msg_.markers[idx].points[1].y = 0.2;
                    marker_array_msg_.markers[idx].color.g = 255;
                }
                else if (j == 2)
                {
                    // z-axis arrow in blue
                    marker_array_msg_.markers[idx].points[1].z = 0.2;
                    marker_array_msg_.markers[idx].color.b = 255;
                }

                marker_array_msg_.markers[idx].pose = object_detection_msg->detections[i].pose.pose;

                marker_array_msg_.markers[idx].lifetime = ros::Duration(2);
                marker_array_msg_.markers[idx].scale.x = 0.01;   // shaft diameter
                marker_array_msg_.markers[idx].scale.y = 0.015;  // head diameter
                marker_array_msg_.markers[idx].scale.z = 0;      // head length, 0 = default
            }
            for (unsigned int j = 3; j < 4; ++j)
            {
                unsigned int idx = markers_per_detection * i + j;
                marker_array_msg_.markers[idx].header = object_detection_msg->header;
                marker_array_msg_.markers[idx].ns = "object_detection";
                marker_array_msg_.markers[idx].id = idx;
                marker_array_msg_.markers[idx].type = visualization_msgs::Marker::CUBE;
                marker_array_msg_.markers[idx].action = visualization_msgs::Marker::ADD;
                marker_array_msg_.markers[idx].color.a = 0.25;
                marker_array_msg_.markers[idx].color.r = 128;
                marker_array_msg_.markers[idx].color.g = 128;
                marker_array_msg_.markers[idx].color.b = 128;

                marker_array_msg_.markers[idx].pose = object_detection_msg->detections[i].pose.pose;

                marker_array_msg_.markers[idx].lifetime = ros::Duration(2);
                marker_array_msg_.markers[idx].scale.x = object_detection_msg->detections[i].bounding_box_lwh.x;
                marker_array_msg_.markers[idx].scale.y = object_detection_msg->detections[i].bounding_box_lwh.y;
                marker_array_msg_.markers[idx].scale.z = object_detection_msg->detections[i].bounding_box_lwh.z;
            }
        }

        // mark leftover markers from a previously larger detection set for deletion, then publish once
        if (prev_marker_array_size_ > marker_array_size)
            for (unsigned int i = marker_array_size; i < prev_marker_array_size_; ++i)
                marker_array_msg_.markers[i].action = visualization_msgs::Marker::DELETE;
        prev_marker_array_size_ = marker_array_size;

        marker_array_publisher_.publish(marker_array_msg_);
    }

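    /// Converts a sensor_msgs::Image into a cv::Mat via cv_bridge without copying
    /// (currently only referenced from commented-out code).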
    bool convertImageMessageToMat(const sensor_msgs::Image::ConstPtr& image_msg, cv_bridge::CvImageConstPtr& image_ptr, cv::Mat& image)
    {
        try
        {
            image_ptr = cv_bridge::toCvShare(image_msg, image_msg->encoding);
        }
        catch (cv_bridge::Exception& e)
        {
            ROS_ERROR("ObjectDetectionVisualizer::convertImageMessageToMat: cv_bridge exception: %s", e.what());
            return false;
        }
        image = image_ptr->image;

        return true;
    }

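    /// Copies the RGB data of a pcl::PointXYZRGB cloud into a BGR cv::Mat of the same
    /// width and height; assumes the cloud is organized (height > 1), as delivered by
    /// RGB-D cameras.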
    bool convertPclMessageToMat(const sensor_msgs::PointCloud2::ConstPtr& pointcloud_msg, pcl::PointCloud<pcl::PointXYZRGB>& pointcloud, cv::Mat& color_image)
    {
        pcl::fromROSMsg(*pointcloud_msg, pointcloud);
        color_image.create(pointcloud.height, pointcloud.width, CV_8UC3);
        for (int v = 0; v < (int)pointcloud.height; v++)
        {
            for (int u = 0; u < (int)pointcloud.width; u++)
            {
                const pcl::PointXYZRGB& point = pointcloud(u, v);
                color_image.at<cv::Vec3b>(v, u) = cv::Vec3b(point.b, point.g, point.r);
            }
        }
        return true;
    }

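    /// Caches the color image extracted from the most recent point cloud; the mutex is
    /// required because objectDetectionDisplayCallback reads color_image_ from another callback.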
    void pointcloudCallback(const sensor_msgs::PointCloud2::ConstPtr& pointcloud_msg)
    {
        // secure this access with a mutex
        boost::mutex::scoped_lock lock(color_image_mutex_);

        pcl::PointCloud<pcl::PointXYZRGB> pointcloud;
        convertPclMessageToMat(pointcloud_msg, pointcloud, color_image_);
    }

    void objectDetectionDisplayCallback(const cob_object_detection_msgs::DetectionArray::ConstPtr& object_detection_msg)
    {
        cv::Mat image;
        {
            // secure this access with a mutex
            boost::mutex::scoped_lock lock(color_image_mutex_);
            image = color_image_.clone();
        }
        if (image.empty())
        {
            ROS_WARN("No color image received yet, skipping detection display.");
            return;
        }
        renderDetections(image, object_detection_msg);

        cv::Mat display;
        cv::resize(image, display, cv::Size(), 0.5, 0.5);
        cv::imshow("object detections", display);
        cv::waitKey(50);

        // store the annotated image to disk (the target directory must exist)
        std::stringstream file;
        file << "object_detection_visualizer/" << image_counter_ << ".png";
        cv::imwrite(file.str(), image);
        image_counter_++;
    }

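    /// Synchronized variant of the display callback: receives the detections together with the
    /// matching point cloud (only used with the commented-out message_filters setup in the constructor).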
    void detectionImageCallback(const cob_object_detection_msgs::DetectionArray::ConstPtr& object_detection_msg, const sensor_msgs::PointCloud2::ConstPtr& pointcloud_msg)
    {
//        // read image
//        cv_bridge::CvImageConstPtr color_image_ptr;
//        cv::Mat color_image;
//        if (convertImageMessageToMat(image_msg, color_image_ptr, color_image) == false)
//            return;

        cv::Mat image;
        pcl::PointCloud<pcl::PointXYZRGB> pointcloud;
        convertPclMessageToMat(pointcloud_msg, pointcloud, image);

        renderDetections(image, object_detection_msg);

        cv::Mat display;
        cv::resize(image, display, cv::Size(), 0.5, 0.5);
        cv::imshow("object detections", display);
        cv::waitKey(50);

        std::stringstream file;
        file << "object_detection_visualizer/" << image_counter_ << ".png";
        cv::imwrite(file.str(), image);
        image_counter_++;
    }

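    /// Projects the 3-D bounding box of every detection into the image, draws it in a
    /// per-label random color and annotates it with the object label.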
    bool renderDetections(cv::Mat& image, const cob_object_detection_msgs::DetectionArray::ConstPtr& object_detection_msg)
    {
        if (projection_matrix_received_ == false)
        {
            ROS_WARN("Did not receive a projection matrix yet.");
            return false;
        }

        // render 3-D detection results
        // project bounding box of the object on the image
        if (object_detection_msg->detections.size() == 0)
            return false;

        for (size_t o=0; o<object_detection_msg->detections.size(); ++o)
        {
            const cob_object_detection_msgs::Detection& detection = object_detection_msg->detections[o];

            // compute 8 corner points of bounding box and transform them into the camera coordinate system
            // reproduce 8 points in a special order (simplifies drawing a box):
            //  x  y  z
            //  -  -  -
            //  +  -  -
            //  +  +  -
            //  -  +  -
            //  -  -  +
            //  +  -  +
            //  +  +  +
            //  -  +  +
            std::cout << "New box corners for object " << detection.label << ": \n";
            Eigen::Affine3d pose;
            tf::poseMsgToEigen(detection.pose.pose, pose);  // i.e. pose = the transform pointing from camera to object coordinate system
            std::vector<cv::Vec3d> corners_3d;
            for (int k=0; k<2; ++k)  // z is given in full length, the object center is at the bottom
            {
                int s = 1;
                for (int j=-1; j<2; j+=2)  // x and y are only provided in half side lengths, center is in the object center
                {
                    for (int i=-1; i<2; i+=2)
                    {
                        Eigen::Affine3d corner = pose * Eigen::Translation3d(i*s*detection.bounding_box_lwh.x, j*detection.bounding_box_lwh.y, k*detection.bounding_box_lwh.z);
                        corners_3d.push_back(cv::Vec3d(corner.translation()(0), corner.translation()(1), corner.translation()(2)));
                        //std::cout << "  > " << corner.translation()(0) << ", " << corner.translation()(1) << ", " << corner.translation()(2) << std::endl;
                    }
                    s *= -1;
                }
            }

            // project xyz coordinates to image coordinates
            std::vector<cv::Point> corners_2d(corners_3d.size());
            for (size_t i=0; i<corners_3d.size(); ++i)
                corners_2d[i] = projectPoint(corners_3d[i]);

            // draw all detected bounding boxes
            if (object_name_to_color_map_.find(detection.label) == object_name_to_color_map_.end())
                object_name_to_color_map_[detection.label] = CV_RGB(rand()%256, rand()%256, rand()%256);
            if (drawDetectedModel3D(image, corners_2d, object_name_to_color_map_[detection.label]) == false)
            {
                ROS_ERROR("Unable to draw detected bounding boxes!");
                return false;
            }

            // annotate bounding box with object name
            cv::Point center_top;
            for (int i=4; i<8; ++i)
                center_top += corners_2d[i];
            center_top *= 0.25;
            cv::putText(image, detection.label, center_top, cv::FONT_HERSHEY_SIMPLEX, 0.75, object_name_to_color_map_[detection.label], 2);
        }

        return true;
    }

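    /// projectPoint() applies the standard pinhole model with the intrinsic matrix K received
    /// in pointcloudInfoCallback:
    ///   [u*w, v*w, w]^T = K * [X, Y, Z]^T,  pixel = (u*w/w, v*w/w)
    /// For example, with fx = fy = 500, cx = 320, cy = 240 the camera-frame point (0.1, 0.0, 1.0)
    /// maps to (500*0.1/1.0 + 320, 500*0.0/1.0 + 240) = (370, 240). The values here are only
    /// illustrative; the real K comes from the CameraInfo topic.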
    cv::Point projectPoint(const cv::Vec3d& point_3d)
    {
        if (projection_matrix_received_ == false)
        {
            ROS_WARN("Did not receive a projection matrix yet.");
            return cv::Point();
        }
        cv::Mat uvw = projection_matrix_ * cv::Mat(point_3d);
        return cv::Point(uvw.at<double>(0,0)/uvw.at<double>(2,0), uvw.at<double>(1,0)/uvw.at<double>(2,0));
    }

    bool drawDetectedModel3D(cv::Mat image, const std::vector<cv::Point>& corners_2d, const cv::Scalar& color)
    {
        // Draw lower rectangle of bounding box
        for (unsigned int j=0; j<3; ++j)
        {
            cv::line(image, corners_2d[j], corners_2d[j+1], color, 3);
        }
        cv::line(image, corners_2d[3], corners_2d[0], color, 3);

        // Draw upper rectangle of bounding box
        for (unsigned int j=4; j<corners_2d.size()-1; j++)
        {
            cv::line(image, corners_2d[j], corners_2d[j+1], color, 3);
        }
        cv::line(image, corners_2d[7], corners_2d[4], color, 3);

        // Draw side lines of bounding box
        for (unsigned int j=0; j<4; j++)
        {
            cv::line(image, corners_2d[j], corners_2d[j+4], color, 3);
        }

        return true;
    }

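    /// Copies the 3x3 intrinsic matrix K from the CameraInfo message into projection_matrix_;
    /// bounding boxes can only be rendered once this has been received.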
    void pointcloudInfoCallback(const sensor_msgs::CameraInfoConstPtr& data)
    {
        double* f_ptr = projection_matrix_.ptr<double>(0);
        for (int i = 0; i < 9; i++)
            f_ptr[i] = data->K[i];

        ROS_INFO("[object_detection_visualizer] Received new projection matrix:");
        std::cout << "\t... / " << std::setw(8) << projection_matrix_.at<double>(0, 0) << " ";
        std::cout << std::setw(8) << projection_matrix_.at<double>(0, 1) << " ";
        std::cout << std::setw(8) << projection_matrix_.at<double>(0, 2) << " \\ " << std::endl;
        std::cout << "\t... | " << std::setw(8) << projection_matrix_.at<double>(1, 0) << " ";
        std::cout << std::setw(8) << projection_matrix_.at<double>(1, 1) << " ";
        std::cout << std::setw(8) << projection_matrix_.at<double>(1, 2) << " | " << std::endl;
        std::cout << "\t... \\ " << std::setw(8) << projection_matrix_.at<double>(2, 0) << " ";
        std::cout << std::setw(8) << projection_matrix_.at<double>(2, 1) << " ";
        std::cout << std::setw(8) << projection_matrix_.at<double>(2, 2) << " / " << std::endl << std::endl;

        projection_matrix_received_ = true;
    }

    // member declarations reconstructed where the listing dropped them; names and types follow their usage above
    ros::NodeHandle node_handle_;
    ros::Subscriber detection_array_sub_;       ///< receives the detection messages for the RViz marker display
    ros::Publisher marker_array_publisher_;
    unsigned int prev_marker_array_size_;
    visualization_msgs::MarkerArray marker_array_msg_;

//    image_transport::ImageTransport* it_;
//    image_transport::SubscriberFilter color_image_sub_;    ///< color camera image topic
//    message_filters::Subscriber<sensor_msgs::PointCloud2> pointcloud_sub_;    ///< receives the detection messages
//    message_filters::Subscriber<cob_object_detection_msgs::DetectionArray> sync_detection_array_sub_;    ///< receives the detection messages
//    message_filters::Synchronizer<message_filters::sync_policies::ApproximateTime<cob_object_detection_msgs::DetectionArray, sensor_msgs::PointCloud2> >* sync_input_;

    ros::Subscriber detection_display_sub_;     ///< receives the detection messages for the image overlay display
    ros::Subscriber pointcloud_sub_;            ///< receives the colored point cloud
    ros::Subscriber pointcloud_info_sub_;       ///< receives the camera intrinsics

    bool projection_matrix_received_;
    cv::Mat projection_matrix_;     // 3x3 intrinsic matrix

    boost::mutex color_image_mutex_;    // secures read and write operations on camera data
    cv::Mat color_image_;

    std::map<std::string, cv::Scalar> object_name_to_color_map_;
    int image_counter_;

    // parameters
    bool display_rviz_markers_;
    bool display_detection_image_;
};


//#######################
//#### main program ####
int main(int argc, char** argv)
{
    // Initialize ROS, specify name of node
    ros::init(argc, argv, "object_detection_visualizer");

    // Create a handle for this node, initialize node
    ros::NodeHandle nh;

    // Create and initialize an instance of the visualizer
    ObjectDetectionVisualizer odv(nh);

//    ros::MultiThreadedSpinner spinner(2);    // use 2 threads
//    spinner.spin();
    ros::spin();

    return 0;
}
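
For a quick test of the RViz marker output, a separate node can publish a single dummy detection on the topic the visualizer subscribes to. This is a minimal sketch: the topic name "detection_array_topic" follows the (commented-out) subscriber above, while the frame id, pose and box dimensions are arbitrary assumptions.

#include <ros/ros.h>
#include <cob_object_detection_msgs/DetectionArray.h>

int main(int argc, char** argv)
{
    ros::init(argc, argv, "object_detection_test_publisher");
    ros::NodeHandle nh;
    ros::Publisher pub = nh.advertise<cob_object_detection_msgs::DetectionArray>("detection_array_topic", 1);

    ros::Rate rate(1.0);
    while (ros::ok())
    {
        cob_object_detection_msgs::DetectionArray msg;
        msg.header.stamp = ros::Time::now();
        msg.header.frame_id = "camera_link";    // frame id is an assumption, adapt to your camera frame

        cob_object_detection_msgs::Detection det;
        det.label = "test_object";
        det.pose.header = msg.header;
        det.pose.pose.position.z = 1.0;         // 1 m in front of the camera
        det.pose.pose.orientation.w = 1.0;      // identity orientation
        det.bounding_box_lwh.x = 0.1;           // half side lengths in x/y, full height in z (see renderDetections)
        det.bounding_box_lwh.y = 0.1;
        det.bounding_box_lwh.z = 0.2;
        msg.detections.push_back(det);

        pub.publish(msg);
        rate.sleep();
    }
    return 0;
}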