descriptor_surface_based_recognition.cpp
1 
20 #include <rviz/mesh_loader.h>
22 
23 #include <OGRE/Ogre.h>
24 #include <OGRE/OgreException.h>
25 #include <OGRE/OgreRoot.h>
26 
27 #include <OgreMesh.h>
28 #include <OGRE/OgreLight.h>
29 
30 #include <ros/package.h>
31 #include <ros/console.h>
32 #include <ros_uri/ros_uri.hpp>
33 
34 #include <pcl/common/common.h>
35 #include <pcl/point_cloud.h>
36 #include <pcl_conversions/pcl_conversions.h> // needed for pcl_conversions::toPCL below
37 #include <pcl/point_types.h>
38 #include <pcl/features/integral_image_normal.h>
39 #include <pcl/common/transforms.h>
40 #include <pcl/filters/voxel_grid.h>
41 
42 #include <asr_halcon_bridge/halcon_image.h>
43 
44 #include <iostream>
45 #include <fstream>
46 
48 #include <rapidxml.hpp>
49 #include <rapidxml_utils.hpp>
50 
51 
52 
53 
54 
55 namespace descriptor_surface_based_recognition {
56 
57 std::string PACKAGE_PATH;
58 
59 DescriptorSurfaceBasedRecognition::DescriptorSurfaceBasedRecognition() : nh_(NODE_NAME)
60 {
61 
62  ROS_DEBUG_STREAM("Initialize DescriptorSurfaceBasedRecognition");
63 
64  double far_plane = 100;
65  double near_plane = 0.01;
66  double cx = 648.95153;
67  double cy = 468.29311;
68  double fx = 1689.204742;
69  double fy = 1689.204742;
70  double image_width = 1292.0;
71  double image_height = 964.0;
72  int render_width = 800;
73  int render_height = 600;
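 // The defaults above are only fallbacks: ros::NodeHandle::getParam leaves each variable untouched if the corresponding pose_val_* parameter is not set on the parameter server.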
74  nh_.getParam("pose_val_far_plane", far_plane);
75  nh_.getParam("pose_val_near_plane", near_plane);
76  nh_.getParam("pose_val_cx", cx);
77  nh_.getParam("pose_val_cy", cy);
78  nh_.getParam("pose_val_fx", fx);
79  nh_.getParam("pose_val_fy", fy);
80  nh_.getParam("pose_val_image_width", image_width);
81  nh_.getParam("pose_val_image_height", image_height);
82  nh_.getParam("pose_val_render_image_width", render_width);
83  nh_.getParam("pose_val_render_image_height", render_height);
84 
85  nh_.getParam("image_color_topic", image_color_topic_);
86  nh_.getParam("image_mono_topic", image_mono_topic_);
87  nh_.getParam("point_cloud_topic", point_cloud_topic_);
88  nh_.getParam("output_objects_topic", output_objects_topic_);
89  nh_.getParam("output_marker_topic", output_marker_topic_);
90  nh_.getParam("output_marker_bounding_box_topic", output_marker_bounding_box_topic_);
91  nh_.getParam("output_cloud_topic", output_cloud_topic_);
92  nh_.getParam("output_image_topic", output_image_topic_);
93 
94  PACKAGE_PATH = ros::package::getPath("asr_descriptor_surface_based_recognition");
95 
96  // Set up dynamic reconfigure
97  reconfigure_server_.setCallback(boost::bind(&DescriptorSurfaceBasedRecognition::configCallback, this, _1, _2));
98 
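 // Evaluation mode: (re)create the evaluation output directory inside the package and truncate the timing and pose log files written during recognition.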
99  if (config_.evaluation) {
100  //clear output file
101  std::string path = PACKAGE_PATH + "/" + OUTPUT_EVALUATION_DIR;
102  boost::filesystem::path dir(path.c_str());
103  boost::filesystem::create_directory(dir);
104  outstream_times_.open((path + "/" + OUTPUT_EVALUATION_FILE_TIME).c_str(), std::ofstream::out | std::ofstream::trunc);
105  outstream_poses_.open((path + "/" + OUTPUT_EVALUATION_FILE_POSES).c_str(), std::ofstream::out | std::ofstream::trunc);
106  }
107 
108  object_db_service_client_ = nh_.serviceClient<asr_object_database::ObjectMetaData>(OBJECT_DB_SERVICE_OBJECT_TYPE);
109  object_db_meshes_service_client_ = nh_.serviceClient<asr_object_database::RecognizerListMeshes>(OBJECT_DB_SERVICE_OBJECT_MESHES);
110 
111  ROS_DEBUG_STREAM("Database clients initialized");
112 
113  if (config_.usePoseValidation) {
114  pose_val_ = PoseValidation(image_width, image_height, far_plane, near_plane, cx, cy, fx, fy, render_width, render_height);
116  }
117 
118  ROS_DEBUG_STREAM("Pose validation initialized");
119 
121  msgs_marker_array_ = boost::make_shared<visualization_msgs::MarkerArray>();
122  msgs_box_marker_array_ = boost::make_shared<visualization_msgs::MarkerArray>();
123 
124  //ros services
129 
130  //ros subscriptions
134 
135  //ros publishers
136  marker_pub_ = nh_.advertise<visualization_msgs::Marker> (output_marker_topic_, 1);
137  boxes_pub_ = nh_.advertise<visualization_msgs::MarkerArray> (output_marker_bounding_box_topic_, 1);
138  cloud_pub_ = nh_.advertise<sensor_msgs::PointCloud2> (output_cloud_topic_, 1);
139  objects_pub_ = nh_.advertise<asr_msgs::AsrObject> (output_objects_topic_, 1);
140  image_pub_ = nh_.advertise<sensor_msgs::Image> (output_image_topic_, 1);
141 
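 // Color image, mono image and point cloud are synchronized with an ApproximateTime policy; rosCallback runs once per matched frame triple.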
143  sync_policy_->registerCallback(boost::bind(&DescriptorSurfaceBasedRecognition::rosCallback, this, _1, _2, _3));
144 
145  ROS_DEBUG_STREAM("ROS services and publishers initialized");
146 
147  ROS_INFO_STREAM("TO RECOGNIZE A NEW OBJECT CALL THE FOLLOWING SERVICE: " << std::endl <<
148  " /asr_descriptor_surface_based_recognition/get_recognizer <object_type_name> <instance_number> <use_pose_validation>" << std::endl <<
149  " TO END RECOGNITION FOR A SPECIFIC OBJECT CALL: " << std::endl <<
150  " /asr_descriptor_surface_based_recognition/release_recognizer <object_type_name>" << std::endl <<
151  " TO END RECOGNITION FOR ALL OBJECTS OF A TOPIC CALL: " << std::endl <<
152  " /asr_descriptor_surface_based_recognition/clear_all_recognizers" << std::endl <<
153  " TO GET A LIST OF ALL OBJECTS CURRENTLY SEARCHED FOR CALL: " << std::endl <<
154  " /asr_descriptor_surface_based_recognition/get_object_list");
155 }
156 
157 bool DescriptorSurfaceBasedRecognition::processGetRecognizerRequest(GetRecognizer::Request &req, GetRecognizer::Response &res) {
158  std::string name = req.object_name;
159  int count = req.count;
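 // Requested instance counts outside [1, 10] fall back to a single instance.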
160  if ((count < 1) || (count > 10)) {
161  count = 1;
162  }
163  bool use_pose_val = req.use_pose_val;
164 
165  res.success = startObjectRecognition(name, count, use_pose_val);
166  res.object_name = name;
167 
168  return true;
169 }
170 
171 bool DescriptorSurfaceBasedRecognition::processReleaseRecognizerRequest(ReleaseRecognizer::Request &req, ReleaseRecognizer::Response &res) {
172  std::string name = req.object_name;
173  stopObjectRecognition(name);
174  return true;
175 }
176 
177 bool DescriptorSurfaceBasedRecognition::processGetObjectListRequest(GetObjectList::Request &req, GetObjectList::Response &res) {
178  std::vector<std::string> object_names;
179  for (std::vector<ObjectDescriptor>::iterator iter = objects_.begin(); iter != objects_.end(); ++iter) {
180  object_names.push_back(iter->getName());
181  }
182  res.objects = object_names;
183  return true;
184 }
185 
186 bool DescriptorSurfaceBasedRecognition::processClearAllRecognizersRequest(ClearAllRecognizers::Request &req, ClearAllRecognizers::Response &res) {
187  objects_.clear();
188  meshes_.clear();
189  return true;
190 }
191 
192 void DescriptorSurfaceBasedRecognition::initializeMeshes() {
193 
194  asr_object_database::RecognizerListMeshes objectMeshes;
195  objectMeshes.request.recognizer = OBJECT_DATABASE_CATEGORY;
196  object_db_meshes_service_client_.call(objectMeshes.request, objectMeshes.response);
197 
198  std::vector<std::string> mesh_paths = objectMeshes.response.meshes;
199  for (std::vector<std::string>::iterator iter = mesh_paths.begin(); iter != mesh_paths.end(); ++iter) {
200  rviz::loadMeshFromResource(*iter); // pre-load each mesh into Ogre's resource cache
201  }
202 }
203 
204 bool DescriptorSurfaceBasedRecognition::startObjectRecognition(std::string name, int count, bool use_pose_val) {
205  asr_object_database::ObjectMetaData objectType;
206  objectType.request.object_type = name;
207  objectType.request.recognizer = OBJECT_DATABASE_CATEGORY;
208  object_db_service_client_.call(objectType.request, objectType.response);
209  if (objectType.response.is_valid) {
210  std::string object_name = name;
211  std::string xml_path = ros_uri::absolute_path(objectType.response.object_folder) + "/" + name + ".xml";
212 
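 // The object's meta XML from the object database may contain a <name> node that overrides the requested type name.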
213  try {
214  rapidxml::file<> xmlFile(xml_path.c_str());
215  rapidxml::xml_document<> doc;
216  doc.parse<0>(xmlFile.data());
217 
218  rapidxml::xml_node<> *first_node = doc.first_node();
219  if (first_node) {
220  rapidxml::xml_node<> *node_name = first_node->first_node("name");
221  if (node_name) {
222  object_name = node_name->value();
223  }
224 
225  }
226  } catch (const rapidxml::parse_error &err) {
227  ROS_DEBUG_STREAM("Can't parse meta-xml-file (add_object -> name). error: " << err.what());
228  } catch (const std::runtime_error &err) {
229  ROS_DEBUG_STREAM("Can't parse meta-xml-file (add_object -> name). error: " << err.what());
230  }
231 
232  bool object_in_list = false;
233  for (unsigned int i = 0; i < objects_.size(); i++) {
234  if (objects_.at(i).getName() == object_name) {
235  if (objects_.at(i).getInstanceCount() != count) {
236  objects_.at(i).setCount(count);
237  }
238  if (objects_.at(i).getUsePoseVal() != use_pose_val) {
239  objects_.at(i).setUsePoseVal(use_pose_val);
240  }
241  object_in_list = true;
242  break;
243  }
244  }
245  if (!object_in_list) {
246  ObjectDescriptor object(objectType.response.object_folder + "/", xml_path, count, use_pose_val);
247  if (object.isValid()) {
248  objects_.push_back(object);
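 // meshes_ is kept index-parallel to objects_, but only when pose validation is active, since only the validator needs the rendered meshes.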
249  if (pose_val_.isInitialized()) {
250  meshes_.push_back(rviz::loadMeshFromResource(object.getMesh()));
251  }
252  }
253 
254  }
255  return true;
256  }
257  return false;
258 }
259 
260 
261 void DescriptorSurfaceBasedRecognition::stopObjectRecognition(std::string name) {
262  for (unsigned int i = 0; i < objects_.size(); i++) {
263  if (objects_.at(i).getName() == name) {
264  objects_.erase(objects_.begin() + i);
265  if (pose_val_.isInitialized()) {
266  meshes_.erase(meshes_.begin() + i);
267  }
268  break;
269  }
270  }
271 }
272 
273 
274 
275 void DescriptorSurfaceBasedRecognition::rosCallback(const sensor_msgs::ImageConstPtr &input_image_guppy, const sensor_msgs::ImageConstPtr& input_image_guppy_mono, const sensor_msgs::PointCloud2ConstPtr &input_point_cloud_with_guppy)
276 {
277  frame_counter_++;
278 
279  if (objects_.size() > 0) {
280  ROS_INFO_STREAM("Enter callback for new frame");
281  ros::Time time_frame_enter = ros::Time::now();
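 // Per-frame pipeline: convert the inputs, search every registered object in a thread pool, optionally validate the found poses by re-rendering, then publish result messages, markers, clouds and annotated images.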
282 
283  pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud_with_guppy_ptr (new pcl::PointCloud<pcl::PointXYZ>);
284  pcl::fromROSMsg(*input_point_cloud_with_guppy, *point_cloud_with_guppy_ptr);
285 
286  HalconCpp::HImage scene_image = *halcon_bridge::toHalconCopy(input_image_guppy)->image;
287  HalconCpp::HImage scene_image_mono = *halcon_bridge::toHalconCopy(input_image_guppy_mono)->image;
288 
289  ROS_INFO_STREAM("Search objects");
290  std::string object_list = "[";
291  for (unsigned int i = 0; i < objects_.size(); i++) {
292  if (i > 0) {
293  object_list += ", ";
294  }
295  object_list += objects_.at(i).getName();
296  }
297  object_list += "]";
298  ROS_DEBUG_STREAM("Searching for: " << object_list);
299 
300  //prepare search
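 // If all requested instances of an object were found in the previous frame, the search is restricted to the union of rectangles around the last 2D positions (ReduceDomain), which narrows the descriptor search.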
301  std::vector<PoseRecognition*> pose_recs;
302  for (unsigned int i = 0; i < objects_.size(); i++) {
303  HalconCpp::HImage scene_image_curr = scene_image;
304  HalconCpp::HImage scene_image_mono_curr = scene_image_mono;
305  for (unsigned int j = 0; j < last_frame_positions_.size(); j++) {
306  if (last_frame_positions_.at(j).getObjectIndex() == (int)(i)) {
307  if ((int)(last_frame_positions_.at(j).getPositions().size()) == objects_.at(i).getInstanceCount()) {
308  HalconCpp::HRegion region;
309  for (unsigned int k = 0; k < last_frame_positions_.at(j).getPositions().size(); k++) {
310  if (k == 0) {
311  region.GenRectangle1(last_frame_positions_.at(j).getPositions().at(k)[0] - last_frame_positions_.at(j).getSearchRadii().at(k), last_frame_positions_.at(j).getPositions().at(k)[1] - last_frame_positions_.at(j).getSearchRadii().at(k), last_frame_positions_.at(j).getPositions().at(k)[0] + last_frame_positions_.at(j).getSearchRadii().at(k), last_frame_positions_.at(j).getPositions().at(k)[1] + last_frame_positions_.at(j).getSearchRadii().at(k));
312  } else {
313  HalconCpp::HRegion region_curr;
314  region_curr.GenRectangle1(last_frame_positions_.at(j).getPositions().at(k)[0] - last_frame_positions_.at(j).getSearchRadii().at(k), last_frame_positions_.at(j).getPositions().at(k)[1] - last_frame_positions_.at(j).getSearchRadii().at(k), last_frame_positions_.at(j).getPositions().at(k)[0] + last_frame_positions_.at(j).getSearchRadii().at(k), last_frame_positions_.at(j).getPositions().at(k)[1] + last_frame_positions_.at(j).getSearchRadii().at(k));
315  region = region.Union2(region_curr);
316  }
317  }
318  scene_image_curr = scene_image_curr.ReduceDomain(region);
319  scene_image_mono_curr = scene_image_mono_curr.ReduceDomain(region);
320  }
321  break;
322  }
323  }
324  pose_recs.push_back(new PoseRecognition(scene_image_curr, scene_image_mono_curr, point_cloud_with_guppy_ptr, &(objects_.at(i)), config_.medianPointsOffset, config_.samplingDistance, config_.keypointFraction, config_.evaluation, objects_.at(i).getInstanceCount()));
325  }
326 
327 
328 
329  //search
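 // One PoseRecognition task per object is scheduled on the thread pool; wait() blocks until every findPoses() call has finished.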
330  for (unsigned int i = 0; i < pose_recs.size(); i++) {
331  thread_pool_.schedule(boost::bind(&DescriptorSurfaceBasedRecognition::threadTask, this, pose_recs.at(i)));
332  }
333  thread_pool_.wait();
335  ros::Duration search_duration = ros::Time::now() - time_frame_enter;
336 
337  //validate found poses
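 // Pose validation renders the object mesh at the found pose and searches the rendered image again; a result is discarded if the homography found there deviates too much from the one found in the scene, or (with the aggressive strategy) if the object cannot be found in the rendered image at all.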
338  ros::Duration validation_duration;
339  if (pose_val_.isInitialized()) {
340  ros::Time validation_time_start = ros::Time::now();
341  for (unsigned int i = 0; i < pose_recs.size(); i++) {
342  if (objects_.at(i).getUsePoseVal()) {
343  for (unsigned int j = 0; j < pose_recs.at(i)->getResults().size(); j++) {
344  if (pose_recs.at(i)->getResults().at(j)->checkModelFound()) {
345  HalconCpp::HTuple matrix_tuple = pose_val_.validateObject(&objects_.at(i), pose_recs.at(i)->getResults().at(j), meshes_.at(i));
346  if (matrix_tuple.Length() > 0) {
347  HalconCpp::HHomMat2D matrix;
348  matrix.SetFromTuple(matrix_tuple);
349  if (!(pose_val_.compareHomographyMatrices(pose_recs.at(i)->getResults().at(j)->getTransMatrix2D(), matrix, config_.poseValidationDistanceError))) {
350  pose_recs.at(i)->getResults().at(j)->setModelFound(false);
351  pose_recs.at(i)->getResults().at(j)->setPoseValid(false);
352  ROS_DEBUG_STREAM("Homography of object " << objects_.at(i).getName() << " (instance " << j << ") found in the validation image does not match the homography found in the scene");
353  }
354  } else {
355  ROS_DEBUG_STREAM("Object " << objects_.at(i).getName() << " (instance " << j << ") could not be found in the validation image");
356  if (config_.aggressivePoseValidation) {
357  ROS_DEBUG_STREAM("Object " << objects_.at(i).getName() << " (instance " << j << ") is set as invalid due to the aggressive validation strategy");
358  pose_recs.at(i)->getResults().at(j)->setModelFound(false);
359  pose_recs.at(i)->getResults().at(j)->setPoseValid(false);
360  }
361  }
362  }
363  }
364  }
365  }
366 
367  validation_duration = ros::Time::now() - validation_time_start;
368  }
369 
370 
371  //visualisation
372  ros::Time visualisation_time_start = ros::Time::now();
373 
374  //publish markers
375  if (config_.evaluation) {
377  }
378  clearMarkers();
379  last_frame_positions_.clear();
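 // Rebuild the cache of 2D positions and search radii that seeds the reduced search regions in the next frame.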
380  for (unsigned int i = 0; i < pose_recs.size(); i++) {
381  std::string name = pose_recs.at(i)->getObjectDescriptor()->getName();
382  std::string mesh_path = pose_recs.at(i)->getObjectDescriptor()->getMesh();
383  Object2DPositions positions(i);
384  if (config_.useVisualisationColor) {
385  overlaySceneWith2DResults(pose_recs.at(i), &scene_image);
386  } else {
387  overlaySceneWith2DResults(pose_recs.at(i), &scene_image_mono);
388  }
389  for (unsigned int j = 0; j < pose_recs.at(i)->getResults().size(); j++) {
390  if (pose_recs.at(i)->getResults().at(j)->checkModelFound()) {
391  asr_msgs::AsrObjectPtr object = createAsrMessage(pose_recs.at(i), j, input_point_cloud_with_guppy->header);
392  geometry_msgs::PoseWithCovariance obj_pose = object->sampledPoses.front();
393  if (config_.evaluation) {
394  HalconCpp::HQuaternion quat;
395  HalconCpp::HTuple quat_tuple = HalconCpp::HTuple::TupleGenConst(4, 0);
396  quat_tuple[0] = obj_pose.pose.orientation.w;
397  quat_tuple[1] = obj_pose.pose.orientation.x;
398  quat_tuple[2] = obj_pose.pose.orientation.y;
399  quat_tuple[3] = obj_pose.pose.orientation.z;
400  quat.SetFromTuple(quat_tuple);
401 
402  double axis_x_x, axis_x_y, axis_x_z;
403  axis_x_x = quat.QuatRotatePoint3d(1, 0, 0, &axis_x_y, &axis_x_z);
404  double axis_y_x, axis_y_y, axis_y_z;
405  axis_y_x = quat.QuatRotatePoint3d(0, 1, 0, &axis_y_y, &axis_y_z);
406  double axis_z_x, axis_z_y, axis_z_z;
407  axis_z_x = quat.QuatRotatePoint3d(0, 0, 1, &axis_z_y, &axis_z_z);
408 
409  outstream_poses_ << object->type << "_" << object->identifier << ";" << obj_pose.pose.position.x << ";" << obj_pose.pose.position.y << ";" << obj_pose.pose.position.z << ";" << obj_pose.pose.orientation.w << ";" << obj_pose.pose.orientation.x << ";" << obj_pose.pose.orientation.y << ";" << obj_pose.pose.orientation.z << ";" << pose_recs.at(i)->getResults().at(j)->getScore2D() << ";" << pose_recs.at(i)->getResults().at(j)->getScore3D() << ";" << axis_x_x << ";" << axis_x_y << ";" << axis_x_z << ";" << axis_y_x << ";" << axis_y_y << ";" << axis_y_z << ";" << axis_z_x << ";" << axis_z_y << ";" << axis_z_z << "#";
410  }
411  ROS_INFO_STREAM("------- " << name << " found: " << obj_pose.pose.position.x << " " << obj_pose.pose.position.y << " " << obj_pose.pose.position.z << " " << obj_pose.pose.orientation.w << " " << obj_pose.pose.orientation.x << " " << obj_pose.pose.orientation.y << " " << obj_pose.pose.orientation.z << "; score 2D: " << pose_recs.at(i)->getResults().at(j)->getScore2D() << ", score 3D: " << pose_recs.at(i)->getResults().at(j)->getScore3D());
412  createMarker(object, mesh_path);
413  positions.addPosition(pose_recs.at(i)->getResults().at(j)->getTexPoint(), pose_recs.at(i)->getResults().at(j)->getSearchRadius());
414  objects_pub_.publish(object);
415  } else {
416  if (pose_recs.at(i)->getResults().at(j)->getPoseValid()) {
417  ROS_INFO_STREAM("------- " << name << " not found but texture found with score: " << pose_recs.at(i)->getResults().at(j)->getScore2D());
418  } else {
419  ROS_INFO_STREAM("------- " << name << " found but pose is invalid");
420  }
421  }
422  }
423  last_frame_positions_.push_back(positions);
424  }
425 
426  //publish result cloud
427  pcl::PointCloud<pcl::PointXYZRGB>::Ptr result_cloud = createVisualizationCloud(pose_recs, input_point_cloud_with_guppy->header);
428  result_cloud->header = pcl_conversions::toPCL(input_point_cloud_with_guppy->header);
429  sensor_msgs::PointCloud2 reduced_cloud_sensor_msg;
430  pcl::toROSMsg(*result_cloud, reduced_cloud_sensor_msg);
431  cloud_pub_.publish(reduced_cloud_sensor_msg);
432 
433  //publish the reduced clouds' bounding boxes (created in createVisualizationCloud)
434  boxes_pub_.publish(msgs_box_marker_array_);
435 
436  //publish 2D results
437  halcon_bridge::HalconImagePtr ptr = boost::make_shared<halcon_bridge::HalconImage>();
438  ptr->image = new HalconCpp::HImage();
439  if (config_.useVisualisationColor) {
440  ptr->header = input_image_guppy->header;
441  ptr->encoding = input_image_guppy->encoding;
442  *ptr->image = scene_image;
443  } else {
444  ptr->header = input_image_guppy_mono->header;
445  ptr->encoding = input_image_guppy_mono->encoding;
446  *ptr->image = scene_image_mono;
447  }
448  sensor_msgs::ImagePtr result = ptr->toImageMsg();
449  image_pub_.publish(result);
450 
451  //show durations
452  ROS_INFO_STREAM("Time for search: " << search_duration << " seconds");
453  if (pose_val_.isInitialized()) {
454  ROS_INFO_STREAM("Time for pose validation: " << validation_duration << " seconds");
455  }
456  ROS_INFO_STREAM("Time for visualisation: " << ros::Time::now() - visualisation_time_start << " seconds");
457  ROS_INFO_STREAM("Time for frame: " << ros::Time::now() - time_frame_enter << " seconds");
458 
459  //evaluation
460  if (config_.evaluation) {
461  outstream_times_ << frame_counter_ << "#" << search_duration << ";";
462  if (pose_val_.isInitialized()) {
463  outstream_times_ << validation_duration << ";";
464  } else {
465  outstream_times_ << "-;";
466  }
467  outstream_times_ << ros::Time::now() - visualisation_time_start << ";" << ros::Time::now() - time_frame_enter << ";" << std::endl;
468  outstream_poses_ << std::endl;
469  }
470 
471 
472  std::cout << std::endl; //end line
473 
474  for (unsigned int i = 0; i < pose_recs.size(); i++) {
475  delete pose_recs.at(i);
476  }
477  }
478 }
479 
480 
481 void DescriptorSurfaceBasedRecognition::configCallback(DescriptorSurfaceBasedRecognitionConfig &config, uint32_t level)
482 {
483  this->config_ = config;
484 }
485 
486 
487 void DescriptorSurfaceBasedRecognition::threadTask(PoseRecognition *pose_rec)
488 {
489  pose_rec->findPoses();
490 }
491 
492 
493 asr_msgs::AsrObjectPtr DescriptorSurfaceBasedRecognition::createAsrMessage(PoseRecognition *pose_rec, int results_index, std_msgs::Header header)
494 {
495  HalconCpp::HPose pose;
496  pose.SetFromTuple(pose_rec->getResults().at(results_index)->getPose());
497  HalconCpp::HQuaternion quaternion;
498  quaternion.PoseToQuat(pose);
499  quaternion = quaternion.QuatCompose(pose_rec->getResults().at(results_index)->getAdjustedRotation());
500 
501  asr_msgs::AsrObjectPtr object;
502  object.reset(new asr_msgs::AsrObject());
503 
504  object->header = header;
505  object->providedBy = "descriptor_surface_based_recognition";
506 
507  geometry_msgs::Pose current_pose;
508 
509  current_pose.position.x = (double)pose.ConvertToTuple()[0];
510  current_pose.position.y = (double)pose.ConvertToTuple()[1];
511  current_pose.position.z = (double)pose.ConvertToTuple()[2];
512 
513  current_pose.orientation.w = (double)quaternion.ConvertToTuple().ToDArr()[0];
514  current_pose.orientation.x = (double)quaternion.ConvertToTuple().ToDArr()[1];
515  current_pose.orientation.y = (double)quaternion.ConvertToTuple().ToDArr()[2];
516  current_pose.orientation.z = (double)quaternion.ConvertToTuple().ToDArr()[3];
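 // The quaternion tuple is read scalar-first: index 0 is w, followed by x, y, z.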
517 
518  geometry_msgs::PoseWithCovariance current_pose_with_c;
519  current_pose_with_c.pose = current_pose;
520  for(unsigned int i = 0; i < current_pose_with_c.covariance.size(); i++)
521  current_pose_with_c.covariance.at(i) = 0.0f;
522 
523  object->sampledPoses.push_back(current_pose_with_c);
524 
525  HalconCpp::HTuple bounding_corners = pose_rec->getObjectDescriptor()->getSurfaceModel().GetSurfaceModelParam("bounding_box1");
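 // The returned tuple holds the two extreme corners of the axis-parallel bounding box (indices 0-2 and 3-5); the nested loops expand them into all 8 box corners.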
526  boost::array< ::geometry_msgs::Point_<std::allocator<void> > , 8> bounding_box;
527  for (unsigned int z = 0; z < 2; z++) {
528  for (unsigned int y = 0; y < 2; y++) {
529  for (unsigned int x = 0; x < 2; x++) {
530  bounding_box[4 * z + 2 * y + x].x = (double)bounding_corners[x * 3];
531  bounding_box[4 * z + 2 * y + x].y = (double)bounding_corners[y * 3 + 1];
532  bounding_box[4 * z + 2 * y + x].z = (double)bounding_corners[z * 3 + 2];
533  }
534  }
535  }
536  object->boundingBox = bounding_box;
537 
538  object->colorName = "textured";
539  object->type = pose_rec->getObjectDescriptor()->getName();
540 
541  object->identifier = boost::lexical_cast<std::string>(results_index);
542  object->meshResourcePath = pose_rec->getObjectDescriptor()->getMesh();
543 
544  // sizeConfidence: score of 3D-recognition (== number of scene points that lie on the surface of the found object / number of model points)
545  // typeConfidence: score of 2D-recognition (== number of found features / number of all features)
546  object->sizeConfidence = pose_rec->getResults().at(results_index)->getScore3D();
547  object->typeConfidence = pose_rec->getResults().at(results_index)->getScore2D();
548  return object;
549 }
550 
551 
552 void DescriptorSurfaceBasedRecognition::createMarker(asr_msgs::AsrObjectPtr &object, std::string &mesh_path)
553 {
554  visualization_msgs::Marker marker;
555  marker.header = object->header;
556  marker.ns = "Recognition results";
557  marker.id = msgs_marker_array_->markers.size() + 1;
558  marker.action = visualization_msgs::Marker::ADD;
559  marker.pose.position = object->sampledPoses.front().pose.position;
560 
561  marker.pose.orientation = object->sampledPoses.front().pose.orientation;
562 
563  marker.color.a = 0;
564  marker.color.r = 0;
565  marker.color.g = 0;
566  marker.color.b = 0;
567  marker.scale.x = 0.001;
568  marker.scale.y = 0.001;
569  marker.scale.z = 0.001;
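 // Scale 0.001 converts the mesh to metres (the database meshes appear to be modelled in millimetres); the mesh is rendered with its embedded material at the recognized pose.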
570 
571  marker.mesh_use_embedded_materials = true;
572  marker.type = visualization_msgs::Marker::MESH_RESOURCE;
573  marker.mesh_resource = mesh_path;
574  marker.lifetime = ros::Duration(5);
575 
576  msgs_marker_array_->markers.push_back(marker);
577  marker_pub_.publish(marker);
578 }
579 
580 
581 void DescriptorSurfaceBasedRecognition::clearMarkers()
582 {
583  for (unsigned int i = 0; i < msgs_marker_array_->markers.size(); i++) {
584  msgs_marker_array_->markers[i].action = visualization_msgs::Marker::DELETE;
585  }
586  for (unsigned int i = 0; i < msgs_box_marker_array_->markers.size(); i++) {
587  msgs_box_marker_array_->markers[i].action = visualization_msgs::Marker::DELETE;
588  }
589  msgs_marker_array_->markers.clear();
590 
591  boxes_pub_.publish(msgs_box_marker_array_); // publish the DELETE actions for last frame's boxes
592  msgs_box_marker_array_->markers.clear();
593 }
594 
595 
596 void DescriptorSurfaceBasedRecognition::createBoxMarker(RecognitionResult *result, std_msgs::Header header, rgb color, bool drawCompleteBoxes)
597 {
598  pcl::PointXYZ min;
599  pcl::PointXYZ max;
600  pcl::getMinMax3D(*(result->getSearchCloud()), min, max);
601 
602  visualization_msgs::Marker marker;
603  marker.header = header;
604  marker.ns = "Result Bounding Boxes";
605  marker.id = msgs_box_marker_array_->markers.size() + 1;
606  marker.action = visualization_msgs::Marker::ADD;
607 
608  marker.pose.orientation.w = 1.0;
609  marker.type = visualization_msgs::Marker::LINE_LIST;
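 // LINE_LIST draws one segment per consecutive point pair: the four front-face edges are always added, the remaining eight edges only when drawCompleteBoxes is set.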
610 
611  marker.scale.x = 0.01;
612 
613  marker.color.r = color.r;
614  marker.color.g = color.g;
615  marker.color.b = color.b;
616  marker.color.a = 1.0;
617 
618  marker.lifetime = ros::Duration();
619 
620  geometry_msgs::Point p;
621 
622  //upper-front line
623  p.x = min.x;
624  p.y = min.y;
625  p.z = min.z;
626  marker.points.push_back(p);
627  p.x = max.x;
628  marker.points.push_back(p);
629 
630  //right-front line
631  marker.points.push_back(p);
632  p.y = max.y;
633  marker.points.push_back(p);
634 
635  //lower-front line
636  marker.points.push_back(p);
637  p.x = min.x;
638  marker.points.push_back(p);
639 
640  //left-front line
641  marker.points.push_back(p);
642  p.y = min.y;
643  marker.points.push_back(p);
644 
645 
646  if (drawCompleteBoxes) {
647  //upper-back line
648  p.x = min.x;
649  p.y = min.y;
650  p.z = max.z;
651  marker.points.push_back(p);
652  p.x = max.x;
653  marker.points.push_back(p);
654 
655  //right-back line
656  marker.points.push_back(p);
657  p.y = max.y;
658  marker.points.push_back(p);
659 
660  //lower-back line
661  marker.points.push_back(p);
662  p.x = min.x;
663  marker.points.push_back(p);
664 
665  //left-back line
666  marker.points.push_back(p);
667  p.y = min.y;
668  marker.points.push_back(p);
669 
670 
671  //top-left-conn line
672  p.x = min.x;
673  p.y = min.y;
674  p.z = min.z;
675  marker.points.push_back(p);
676  p.z = max.z;
677  marker.points.push_back(p);
678 
679  //top-right-conn line
680  p.x = max.x;
681  p.y = min.y;
682  p.z = min.z;
683  marker.points.push_back(p);
684  p.z = max.z;
685  marker.points.push_back(p);
686 
687  //bottom-right-conn line
688  p.x = max.x;
689  p.y = max.y;
690  p.z = min.z;
691  marker.points.push_back(p);
692  p.z = max.z;
693  marker.points.push_back(p);
694 
695  //bottom-left-conn line
696  p.x = min.x;
697  p.y = max.y;
698  p.z = min.z;
699  marker.points.push_back(p);
700  p.z = max.z;
701  marker.points.push_back(p);
702  }
703 
704  msgs_box_marker_array_->markers.push_back(marker);
705 }
706 
707 
708 void DescriptorSurfaceBasedRecognition::overlaySceneWith2DResults(PoseRecognition *pose_rec, HalconCpp::HImage *scene_image)
709 {
710  const int BOX_BORDER_SIZE = config_.boundingBoxBorderSize;
711  const int ORIENTATION_TRIANGLE_SIZE = config_.orientationTriangleSize;
712  const int POINTS_RADIUS = config_.featurePointsRadius;
713 
714  HalconCpp::HTuple color_points, color_box;
715  if (config_.useVisualisationColor) {
716  color_points.Append(config_.visualisationColorPointsRed);
717  color_points.Append(config_.visualisationColorPointsGreen);
718  color_points.Append(config_.visualisationColorPointsBlue);
719  color_box.Append(config_.visualisationColorBoxRed);
720  color_box.Append(config_.visualisationColorBoxGreen);
721  color_box.Append(config_.visualisationColorBoxBlue);
722  } else {
723  color_points.Append(255);
724  color_box.Append(255);
725  }
726 
727  for (unsigned int i = 0; i < pose_rec->getResults().size(); i++) {
728 
729  //paint feature points
730  HalconCpp::HTuple row, column;
731  HalconCpp::HRegion search_points;
732  for (unsigned int k = 0; k <= i; k++) {
733  try {
734  pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getDescModel().GetDescriptorModelPoints("search", k, &row, &column);
735  search_points.GenCircle(row, column, HalconCpp::HTuple::TupleGenConst(row.Length(), POINTS_RADIUS));
736  *scene_image = search_points.PaintRegion(*scene_image, color_points, HalconCpp::HTuple("fill"));
737  } catch (HalconCpp::HException &exc) { // no search points for this match index in the current view; skip painting
738  }
739  }
740 
741  //paint bounding box (create inner and outer box and paint difference to get a visible border)
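 // The stored 2D box corners of the matched view are mapped into the scene via the projective transformation; ProjectiveTransPoint2d returns homogeneous coordinates, so each result is divided by the third component (temp).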
742  pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getDescModel().GetDescriptorModelPoints("model", "all", &row, &column);
743  HalconCpp::HRegion bounding_box;
744  bounding_box.GenRegionPoints(row, column);
745  Hlong row1, column1, row2, column2;
746  bounding_box.SmallestRectangle1(&row1, &column1, &row2, &column2);
747 
748  HalconCpp::HTuple rows, columns;
749  double temp;
750  double upperLeftInnerX, upperRightInnerX, lowerRightInnerX, lowerLeftInnerX;
751  double upperLeftInnerY, upperRightInnerY, lowerRightInnerY, lowerLeftInnerY;
752  upperLeftInnerX = pose_rec->getResults().at(i)->getTransMatrix2D().ProjectiveTransPoint2d(pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(0)[1], pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(0)[0], 1.0, &upperLeftInnerY, &temp);
753  upperLeftInnerX = upperLeftInnerX / temp;
754  upperLeftInnerY = upperLeftInnerY / temp;
755  rows.Append((int)upperLeftInnerX);
756  columns.Append((int)upperLeftInnerY);
757 
758  upperRightInnerX = pose_rec->getResults().at(i)->getTransMatrix2D().ProjectiveTransPoint2d(pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(1)[1], pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(1)[0], 1.0, &upperRightInnerY, &temp);
759  rows.Append((int)(upperRightInnerX / temp));
760  columns.Append((int)(upperRightInnerY / temp));
761 
762  lowerRightInnerX = pose_rec->getResults().at(i)->getTransMatrix2D().ProjectiveTransPoint2d(pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(2)[1], pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(2)[0], 1.0, &lowerRightInnerY, &temp);
763  rows.Append((int)(lowerRightInnerX / temp));
764  columns.Append((int)(lowerRightInnerY / temp));
765 
766  lowerLeftInnerX = pose_rec->getResults().at(i)->getTransMatrix2D().ProjectiveTransPoint2d(pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(3)[1], pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(3)[0], 1.0, &lowerLeftInnerY, &temp);
767  rows.Append((int)(lowerLeftInnerX / temp));
768  columns.Append((int)(lowerLeftInnerY / temp));
769 
770  rows.Append((int)upperLeftInnerX);
771  columns.Append((int)upperLeftInnerY);
772 
773  HalconCpp::HRegion inner_box;
774  inner_box.GenRegionPolygonFilled(rows, columns);
775 
776  HalconCpp::HTuple rows_outer, columns_outer;
777  double temp_outer;
778  double upperLeftOuterX, upperRightOuterX, lowerRightOuterX, lowerLeftouterX;
779  double upperLeftOuterY, upperRightOuterY, lowerRightOuterY, lowerLeftOuterY;
780  upperLeftOuterX = pose_rec->getResults().at(i)->getTransMatrix2D().ProjectiveTransPoint2d(pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(0)[1] - BOX_BORDER_SIZE, pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(0)[0] - BOX_BORDER_SIZE, 1.0, &upperLeftOuterY, &temp_outer);
781  upperLeftOuterX = upperLeftOuterX / temp_outer;
782  upperLeftOuterY = upperLeftOuterY / temp_outer;
783  rows_outer.Append((int)upperLeftOuterX);
784  columns_outer.Append((int)upperLeftOuterY);
785 
786  upperRightOuterX = pose_rec->getResults().at(i)->getTransMatrix2D().ProjectiveTransPoint2d(pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(1)[1] - BOX_BORDER_SIZE, pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(1)[0] + BOX_BORDER_SIZE, 1.0, &upperRightOuterY, &temp_outer);
787  rows_outer.Append((int)(upperRightOuterX / temp_outer));
788  columns_outer.Append((int)(upperRightOuterY / temp_outer));
789 
790  lowerRightOuterX = pose_rec->getResults().at(i)->getTransMatrix2D().ProjectiveTransPoint2d(pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(2)[1] + BOX_BORDER_SIZE, pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(2)[0] + BOX_BORDER_SIZE, 1.0, &lowerRightOuterY, &temp_outer);
791  rows_outer.Append((int)(lowerRightOuterX / temp_outer));
792  columns_outer.Append((int)(lowerRightOuterY / temp_outer));
793 
794  lowerLeftouterX = pose_rec->getResults().at(i)->getTransMatrix2D().ProjectiveTransPoint2d(pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(3)[1] + BOX_BORDER_SIZE, pose_rec->getObjectDescriptor()->getModelViewDescriptors().at(pose_rec->getResults().at(i)->getViewIndex()).getBoxCorners().at(3)[0] - BOX_BORDER_SIZE, 1.0, &lowerLeftOuterY, &temp_outer);
795  rows_outer.Append((int)(lowerLeftouterX / temp_outer));
796  columns_outer.Append((int)(lowerLeftOuterY / temp_outer));
797 
798  rows_outer.Append((int)upperLeftOuterX);
799  columns_outer.Append((int)upperLeftOuterY);
800 
801  HalconCpp::HRegion outer_box;
802  outer_box.GenRegionPolygonFilled(rows_outer, columns_outer);
803 
804  inner_box = outer_box.Difference(inner_box);
805 
806  *scene_image = inner_box.PaintRegion(*scene_image, color_box, HalconCpp::HTuple("fill"));
807 
808 
809  //get search radius for next frame
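 // The radius is half of the longer side of the box's enclosing rectangle and limits the search region for this instance in the next frame.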
810  Hlong r, c, r2, c2;
811  inner_box.SmallestRectangle1(&r, &c, &r2, &c2);
812  int max_expansion = r2 - r;
813  if (c2 - c > max_expansion) {
814  max_expansion = c2 - c;
815  }
816  pose_rec->getResults().at(i)->setSearchRadius((int)(max_expansion * 0.5));
817 
818 
819  //paint orientation triangle
820  rows.Clear();
821  columns.Clear();
822  double triLeftX, triRightX, triUpX;
823  double triLeftY, triRightY, triUpY;
824  triLeftX = pose_rec->getResults().at(i)->getTransMatrix2D().ProjectiveTransPoint2d(ORIENTATION_TRIANGLE_SIZE, -1 * ORIENTATION_TRIANGLE_SIZE, 1.0, &triLeftY, &temp);
825  triLeftX = triLeftX / temp;
826  triLeftY = triLeftY / temp;
827  rows.Append((int)triLeftX);
828  columns.Append((int)triLeftY);
829 
830  triRightX = pose_rec->getResults().at(i)->getTransMatrix2D().ProjectiveTransPoint2d(ORIENTATION_TRIANGLE_SIZE, ORIENTATION_TRIANGLE_SIZE, 1.0, &triRightY, &temp);
831  rows.Append((int)(triRightX / temp));
832  columns.Append((int)(triRightY / temp));
833 
834  triUpX = pose_rec->getResults().at(i)->getTransMatrix2D().ProjectiveTransPoint2d(-2.5 * ORIENTATION_TRIANGLE_SIZE, 0, 1.0, &triUpY, &temp);
835  rows.Append((int)(triUpX / temp));
836  columns.Append((int)(triUpY / temp));
837 
838  rows.Append((int)triLeftX);
839  columns.Append((int)triLeftY);
840 
841  HalconCpp::HRegion orientation_triangle;
842  orientation_triangle.GenRegionPolygonFilled(rows, columns);
843  *scene_image = orientation_triangle.PaintRegion(*scene_image, color_box, HalconCpp::HTuple("fill"));
844  }
845 }
846 
847 
848 pcl::PointCloud<pcl::PointXYZRGB>::Ptr DescriptorSurfaceBasedRecognition::createVisualizationCloud(std::vector<PoseRecognition*> &pose_recs, std_msgs::Header header)
849 {
850  pcl::PointCloud<pcl::PointXYZRGB>::Ptr result_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
851 
852  int num_colors = 0;
853  for (unsigned int i = 0; i < pose_recs.size(); i++) {
854  for(unsigned int j = 0; j < pose_recs.at(i)->getResults().size(); j++) {
855  if (pose_recs.at(i)->getResults().at(j)->checkModelFound()) {
856  num_colors++;
857  }
858  }
859  }
860  std::vector<rgb> colors;
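 // One hue per found instance, spread evenly over the HSV circle (integer step, so a few extra colors may be generated); saturation and value are fixed.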
861  if (num_colors > 0) {
862  for(int i = 0; i < 360; i += 360 / num_colors) {
863  hsv c;
864  c.h = i;
865  c.s = 0.9;
866  c.v = 0.5;
867  colors.push_back(hsv2rgb(c));
868  }
869  } else {
870  hsv c;
871  c.h = 0;
872  c.s = 0.9;
873  c.v = 0.5;
874  colors.push_back(hsv2rgb(c));
875  }
876  int counter = 0;
877  for (unsigned int i = 0; i < pose_recs.size(); i++) {
878  for(unsigned int j = 0; j < pose_recs.at(i)->getResults().size(); j++) {
879  if (pose_recs.at(i)->getResults().at(j)->checkModelFound()) {
880 
881  //draw cloud boxes
882  if (config_.drawCloudBoxes) {
883  createBoxMarker(pose_recs.at(i)->getResults().at(j), header, colors.at(counter), config_.drawCompleteCloudBoxes);
884  }
885 
886  pcl::PointCloud<pcl::PointXYZRGB>::Ptr tmp(new pcl::PointCloud<pcl::PointXYZRGB>);
887  pcl::copyPointCloud(*(pose_recs.at(i)->getResults().at(j)->getSearchCloud()), *tmp);
888 
889  for(unsigned int k = 0; k < tmp->width * tmp->height; k++) {
890  tmp->points[k].r = colors.at(counter).r * 255;
891  tmp->points[k].g = colors.at(counter).g * 255;
892  tmp->points[k].b = colors.at(counter).b * 255;
893  }
894  *result_cloud += *tmp;
895 
896  counter = (counter + 1) % colors.size();
897  }
898  }
899  }
900 
901  if (result_cloud->points.empty()) {
902  //Dummy point, to show "empty" cloud in rviz
903  pcl::PointXYZRGB point;
904  point.x = 1000;
905  point.y = 1000;
906  point.z = 1000;
907  result_cloud->push_back(point);
908  }
909  return result_cloud;
910 }
911 
912 } // namespace descriptor_surface_based_recognition
913 
914 int
915 main (int argc, char** argv)
916 {
917 
918  ros::init (argc, argv, "asr_descriptor_surface_based_recognition");
919 
920  descriptor_surface_based_recognition::DescriptorSurfaceBasedRecognition recognition_node; // recognition node instance (illustrative name); kept alive while spinning
921 
922  ros::spin();
923 
924  return 0;
925 }