object_tracker.cpp
Go to the documentation of this file.
1 #include "object_tracker.h"
2 
3 #include <hector_nav_msgs/GetDistanceToObstacle.h>
4 #include <hector_nav_msgs/GetNormal.h>
5 #include <hector_worldmodel_msgs/VerifyObject.h>
6 #include <hector_worldmodel_msgs/VerifyPercept.h>
7 
8 #include <math.h>
9 
10 #include "Object.h"
11 #include "parameters.h"
12 
13 #include <boost/algorithm/string.hpp>
14 
15 namespace hector_object_tracker {
16 
18 {
// ObjectTracker constructor body.
// NOTE(review): this Doxygen listing drops several original lines (17, 19,
// 26-27, 32, 34, 36-38, 44, 54-55, 74, 204) — the constructor signature, the
// declaration of 'client' used below, and the timer started inside the
// _publish_interval guard are not visible here. Verify against the repository.

// Node-level parameters read from the private namespace.
20  priv_nh.param("frame_id", _frame_id, std::string("map"));
21  priv_nh.param("worldmodel_ns", _worldmodel_ns, std::string("worldmodel"));
22  priv_nh.param("ageing_threshold", _ageing_threshold, 1.0);
23  priv_nh.param("publish_interval", _publish_interval, 0.0);
24 
// Defaults for the per-class parameters (overridable per class_id; see
// Parameters::load calls in the percept callbacks).
25  parameter(_project_objects) = false;
28  parameter(_distance_variance) = pow(1.0, 2);
29  parameter(_angle_variance) = pow(10.0 * M_PI / 180.0, 2);
30  parameter(_min_height) = -999.9;
31  parameter(_max_height) = 999.9;
33  parameter(_pending_time) = 0.0;
35  parameter(_active_time) = 0.0;
39 
// Default marker color: r=0.8, alpha=1.0 (g/b remain 0).
40  std_msgs::ColorRGBA default_color;
41  default_color.r = 0.8; default_color.a = 1.0;
42  parameter(_marker_color) = default_color;
43 
45 
// Percept inputs and model outputs live in the worldmodel namespace.
46  ros::NodeHandle worldmodel(_worldmodel_ns);
47  imagePerceptSubscriber = worldmodel.subscribe("image_percept", 10, &ObjectTracker::imagePerceptCb, this);
48  posePerceptSubscriber = worldmodel.subscribe("pose_percept", 10, &ObjectTracker::posePerceptCb, this);
49  objectAgeingSubscriber = worldmodel.subscribe("object_ageing", 10, &ObjectTracker::objectAgeingCb, this);
50  modelPublisher = worldmodel.advertise<hector_worldmodel_msgs::ObjectModel>("objects", 10, false);
51  modelUpdatePublisher = worldmodel.advertise<hector_worldmodel_msgs::Object>("object", 10, false);
52  modelUpdateSubscriber = worldmodel.subscribe<hector_worldmodel_msgs::ObjectModel>("update", 10, &ObjectTracker::modelUpdateCb, this);
53 
56 
57  sysCommandSubscriber = nh.subscribe("/syscommand", 10, &ObjectTracker::sysCommandCb, this);
58  perceptPoseDebugPublisher = priv_nh.advertise<geometry_msgs::PoseStamped>("percept/pose", 10, false);
59  objectPoseDebugPublisher = priv_nh.advertise<geometry_msgs::PoseStamped>("object/pose", 10, false);
60 
// Optional verification services, configured as an array of structs:
// { service, type ("object"|"percept"), [class_id], [required], [ignore] }.
61  XmlRpc::XmlRpcValue verification_services;
62  if (priv_nh.getParam("verification_services", verification_services) && verification_services.getType() == XmlRpc::XmlRpcValue::TypeArray) {
63  for(int i = 0; i < verification_services.size(); ++i) {
64  XmlRpc::XmlRpcValue item = verification_services[i];
65  if (!item.hasMember("service")) {
66  ROS_ERROR("Verification service %d could not be intialized: no service name given", i);
67  continue;
68  }
69  if (!item.hasMember("type")) {
70  ROS_ERROR("Verification service %d could not be intialized: no service type given", i);
71  continue;
72  }
73 
// NOTE(review): 'client' is declared on dropped original line 74 — presumably
// a ros::ServiceClientPtr, cf. the commented-out get_normal block below.
75  if (item["type"] == "object") {
76  *client = nh.serviceClient<hector_worldmodel_msgs::VerifyObject>(item["service"]);
77  } else if (item["type"] == "percept") {
78  *client = nh.serviceClient<hector_worldmodel_msgs::VerifyPercept>(item["service"]);
79  }
80 
81  if (!client || !client->isValid()) continue;
82  if (!client->exists()) {
83  if (!item.hasMember("required") || !item["required"]) {
84  ROS_WARN("Verification service %s is not (yet) there...", client->getService().c_str());
85  } else {
// A "required" service blocks startup in 1 s steps until it appears.
86  ROS_WARN("Required verification service %s is not available... waiting...", client->getService().c_str());
87  while(ros::ok() && !client->waitForExistence(ros::Duration(1.0)));
88  }
89  }
90 
// Empty class_id registers the service for all classes.
91  std::string class_id;
92  if (item.hasMember("class_id")) class_id = std::string(item["class_id"]);
93 
94  if (item["type"] == "object") {
95  parameter(_object_verification_services, class_id).push_back(std::make_pair(client, item));
96  if (class_id.empty()) {
97  ROS_INFO("Using object verification service %s for all object classes", client->getService().c_str());
98  } else {
99  ROS_INFO("Using object verification service %s for objects of class %s", client->getService().c_str(), class_id.c_str());
100  }
101  } else if (item["type"] == "percept") {
102  parameter(_percept_verification_services, class_id).push_back(std::make_pair(client, item));
103  if (class_id.empty()) {
104  ROS_INFO("Using percept verification service %s for all percept classes", client->getService().c_str());
105  } else {
106  ROS_INFO("Using percept verification service %s for percepts of class %s", client->getService().c_str(), class_id.c_str());
107  }
108  }
109  }
110  }
111 
112 // XmlRpc::XmlRpcValue get_normal_service;
113 // if (priv_nh.getParam("get_normal_services", get_normal_service) && get_normal_service.getType() == XmlRpc::XmlRpcValue::TypeArray) {
114 // for(int i = 0; i < get_normal_service.size(); ++i) {
115 // XmlRpc::XmlRpcValue item = get_normal_service[i];
116 // if (!item.hasMember("service")) {
117 // ROS_ERROR("GetNormal service %d could not be intialized: no service name given", i);
118 // continue;
119 // }
120 
121 // ros::ServiceClientPtr client(new ros::ServiceClient);
122 // *client = nh.serviceClient<hector_nav_msgs::GetNormal>(item["service"]);
123 
124 // if (!client || !client->isValid()) continue;
125 // if (!client->exists()) {
126 // if (!item.hasMember("required") || !item["required"]) {
127 // ROS_WARN("GetNormal service %s is not (yet) there...", client->getService().c_str());
128 // } else {
129 // ROS_WARN("Required GetNormal service %s is not available... waiting...", client->getService().c_str());
130 // while(ros::ok() && !client->waitForExistence(ros::Duration(1.0)));
131 // }
132 // }
133 
134 // if (item.hasMember("class_id")) {
135 // parameter(_get_normal_service, item["class_id"]) = client;
136 // ROS_INFO("Using GetNormal service %s for objects of class %s", client->getService().c_str(), std::string(item["class_id"]).c_str());
137 // } else {
138 // parameter(_get_normal_service) = client;
139 // ROS_INFO("Using GetNormal service %s for all object classes", client->getService().c_str());
140 // }
141 // }
142 // }
143 
// Services this node offers.
144  setObjectState = worldmodel.advertiseService("set_object_state", &ObjectTracker::setObjectStateCb, this);
145  setObjectName = worldmodel.advertiseService("set_object_name", &ObjectTracker::setObjectNameCb, this);
146  addObject = worldmodel.advertiseService("add_object", &ObjectTracker::addObjectCb, this);
147  getObjectModel = worldmodel.advertiseService("get_object_model", &ObjectTracker::getObjectModelCb, this);
148 
// Optional external object models to merge: each entry is either a topic name
// (string) or a struct { topic, [prefix] }.
149  XmlRpc::XmlRpcValue merge;
150  if (priv_nh.getParam("merge", merge) && merge.getType() == XmlRpc::XmlRpcValue::TypeArray) {
151  for(int i = 0; i < merge.size(); ++i) {
152  const MergedModelPtr& info = *merged_models.insert(merged_models.end(), boost::make_shared<MergedModelInfo>());
153  ros::SubscribeOptions options = ros::SubscribeOptions::create<hector_worldmodel_msgs::ObjectModel>(std::string(), 10, boost::bind(&ObjectTracker::mergeModelCallback, this, _1, info), ros::VoidConstPtr(), 0);
154  if (merge[i].getType() == XmlRpc::XmlRpcValue::TypeStruct && merge[i].hasMember("topic")) {
155  options.topic = static_cast<std::string>(merge[i]["topic"]);
156  if (merge[i].hasMember("prefix")) info->prefix = static_cast<std::string>(merge[i]["prefix"]);
157  } else if (merge[i].getType() == XmlRpc::XmlRpcValue::TypeString) {
158  options.topic = static_cast<std::string>(merge[i]);
159  }
160 
161  if (options.topic.empty()) {
162  ROS_ERROR("Each entry in parameter merge must be either a string containing the topic to subscribe or a struct.");
163  continue;
164  }
165  info->subscriber = nh.subscribe(options);
166 
167  ROS_INFO("Subscribed to object model %s.", options.topic.c_str());
168  }
169  }
170 
// Optional negative-update sources: CameraInfo topics used by
// negativeUpdateCallback to lower support of objects not seen when expected.
171  XmlRpc::XmlRpcValue negative_update;
172  if (priv_nh.getParam("negative_update", negative_update) && negative_update.getType() == XmlRpc::XmlRpcValue::TypeArray) {
173  for(int i = 0; i < negative_update.size(); ++i) {
174  if (negative_update[i].getType() != XmlRpc::XmlRpcValue::TypeStruct) continue;
175  const NegativeUpdatePtr& info = *negativeUpdate.insert(negativeUpdate.end(), boost::make_shared<NegativeUpdateInfo>());
176 
177  // default options
178  info->negative_support = 0.0;
179  info->min_support = 0.0;
180  info->min_distance = 0.0;
181  info->max_distance = 0.0;
182  info->ignore_border_pixels = 0.0;
183  info->not_seen_duration = ros::Duration(0.5);
184 
185  ros::SubscribeOptions options = ros::SubscribeOptions::create<sensor_msgs::CameraInfo>(std::string(), 10, boost::bind(&ObjectTracker::negativeUpdateCallback, this, _1, info), ros::VoidConstPtr(), 0);
186  if (negative_update[i].hasMember("topic")) options.topic = static_cast<std::string>(negative_update[i]["topic"]);
187  if (negative_update[i].hasMember("class_id")) info->class_id = static_cast<std::string>(negative_update[i]["class_id"]);
188  if (negative_update[i].hasMember("negative_support")) info->negative_support = static_cast<double>(negative_update[i]["negative_support"]);
189  if (negative_update[i].hasMember("min_support")) info->min_support = static_cast<double>(negative_update[i]["min_support"]);
190  if (negative_update[i].hasMember("min_distance")) info->min_distance = static_cast<double>(negative_update[i]["min_distance"]);
191  if (negative_update[i].hasMember("max_distance")) info->max_distance = static_cast<double>(negative_update[i]["max_distance"]);
192  if (negative_update[i].hasMember("ignore_border_pixels")) info->ignore_border_pixels = static_cast<double>(negative_update[i]["ignore_border_pixels"]);
193  if (negative_update[i].hasMember("not_seen_duration")) info->not_seen_duration = ros::Duration(static_cast<double>(negative_update[i]["not_seen_duration"]));
194 
195  if (options.topic.empty()) {
196  ROS_ERROR("Each entry in parameter negative_update must have a camera_info topic to subscribe to.");
197  continue;
198  }
199  info->subscriber = nh.subscribe(options);
200  }
201  }
202 
// Periodic model publishing; the timer creation itself is on dropped line 204.
203  if (_publish_interval > 0.0) {
205  }
206 }
207 
209 {}
210 
211 void ObjectTracker::sysCommandCb(const std_msgs::StringConstPtr &sysCommand)
212 {
213  if (sysCommand->data == "reset") {
214  ROS_INFO("Resetting object model.");
215  model.reset();
216  drawings.reset();
217  for(std::vector<MergedModelPtr>::const_iterator it = merged_models.begin(); it != merged_models.end(); ++it) {
218  (*it)->model.reset();
219  }
220  }
221 }
222 
223 void ObjectTracker::imagePerceptCb(const hector_worldmodel_msgs::ImagePerceptConstPtr &percept)
// Converts an image percept (pixel coordinates + optional distance) into a
// PosePercept: back-projects the pixel through the camera model, scales the
// ray by the measured or default distance, builds an orientation whose x-axis
// points along the ray, fills a 3x3 position covariance, and forwards the
// result to posePerceptCb().
224 {
225  hector_worldmodel_msgs::PosePerceptPtr posePercept(new hector_worldmodel_msgs::PosePercept);
226  tf::Pose pose;
227 
228  Parameters::load(percept->info.class_id);
229 
230  ROS_DEBUG("Incoming image percept with image coordinates [%f,%f] in frame %s", percept->x, percept->y, percept->header.frame_id.c_str());
231 
232  posePercept->header = percept->header;
233  posePercept->info = percept->info;
234 
235  // retrieve distance information
// A non-positive measured distance falls back to the per-class default.
236  float distance = percept->distance > 0.0 ? percept->distance : parameter(_default_distance, percept->info.class_id);
237 
238  // retrieve camera model from either the cache or from CameraInfo given in the percept
239  CameraModelPtr cameraModel = getCameraModel(percept->header.frame_id, percept->camera_info);
// NOTE(review): cameraModel may be null if fromCameraInfo() failed in
// getCameraModel(); it is dereferenced below without a check.
240 
241  // transform Point using the camera model
242  cv::Point2d rectified = cameraModel->rectifyPoint(cv::Point2d(percept->x, percept->y));
243  cv::Point3d direction_cv = cameraModel->projectPixelTo3dRay(rectified);
244  tf::Point direction(direction_cv.x, direction_cv.y, direction_cv.z);
245  direction.normalize();
246 // pose.setOrigin(tf::Point(direction_cv.z, -direction_cv.x, -direction_cv.y).normalized() * distance);
247 // tf::Quaternion direction(atan2(-direction_cv.x, direction_cv.z), atan2(direction_cv.y, sqrt(direction_cv.z*direction_cv.z + direction_cv.x*direction_cv.x)), 0.0);
248  pose.setOrigin(tf::Point(direction_cv.x, direction_cv.y, direction_cv.z).normalized() * distance);
249  {
250  // set rotation of object so that the x-axis points in the direction of the object and y-axis is parallel to the camera's x-z-plane
251  // Note: d is given in camera coordinates, while the object's x-axis should point away from the camera.
252  const tf::Point &d(direction); // for readability
// Degenerate cases: ray (almost) parallel to the camera's y-axis, where the
// general formula below would divide by ~zero.
253  if (d.y() >= 0.999) {
254  pose.setBasis(tf::Matrix3x3( 0., -1., 0.,
255  1., 0., 0.,
256  0., 0., 1. ));
257  } else if (d.y() <= -0.999) {
258  pose.setBasis(tf::Matrix3x3( 0., -1., 0.,
259  -1., 0., 0.,
260  0., 0., -1.));
261  } else {
262  double c = 1./sqrt(1. - d.y()*d.y());
263 // pose.setBasis(tf::Matrix3x3( c*d.z(), -c*d.x()*d.y(), d.x(),
264 // 0, 1./c, d.y(),
265 // -c*d.x(), -c*d.y()*d.z(), d.z()));
// Columns: x-axis = d (the viewing ray); y and z chosen orthonormal to it.
266  pose.setBasis(tf::Matrix3x3(d.x(), -c*d.z(), c*d.x()*d.y(),
267  d.y(), 0, -1./c,
268  d.z(), c*d.x(), c*d.y()*d.z() ));
269  }
270  }
271 
272  ROS_DEBUG("--> Rectified image coordinates: [%f,%f]", rectified.x, rectified.y);
273  ROS_DEBUG("--> Projected 3D ray (OpenCV): [%f,%f,%f]", direction_cv.x, direction_cv.y, direction_cv.z);
274  ROS_DEBUG("--> Projected 3D ray (tf): [%f,%f,%f]", pose.getOrigin().x(), pose.getOrigin().y(),pose.getOrigin().z());
275 
// With no measured distance and projection enabled for this class, ask the
// distance-to-obstacle service how far along the ray the percept really is.
276  if (percept->distance == 0.0 && parameter(_project_objects, percept->info.class_id)) {
277  hector_nav_msgs::GetDistanceToObstacle::Request request;
278  hector_nav_msgs::GetDistanceToObstacle::Response response;
279 
280  // project image percept to the next obstacle
281  request.point.header = percept->header;
282  tf::pointTFToMsg(pose.getOrigin(), request.point.point);
283  ros::ServiceClientPtr client = parameter(_distance_to_obstacle_service, percept->info.class_id);
284  if (client && client->call(request, response)) {
285  if (response.distance > 0.0) {
286  // distance = std::max(response.distance - 0.1f, 0.0f);
287  distance = std::max(response.distance, 0.0f);
288  pose.setOrigin(pose.getOrigin().normalized() * distance);
289  ROS_DEBUG("Projected percept to a distance of %.1f m", distance);
290  } else {
291  ROS_DEBUG("Ignoring percept due to unknown or infinite distance: service %s returned %f", client->getService().c_str(), response.distance);
292  return;
293  }
294  } else {
295  ROS_DEBUG("Ignoring percept due to unknown or infinite distance: service is not available");
296  return;
297  }
298  }
299 
300  // set variance
// Lateral variance grows with distance; depth variance is the per-class
// constant. NOTE(review): tan() of _angle_variance (an angle squared, see the
// pow(...) default in the constructor) is unusual — presumably tan(angle)^2
// was intended; verify against parameters.h before changing.
301  Eigen::Matrix3f covariance(Eigen::Matrix3f::Zero());
302  covariance(0,0) = std::max(distance*distance, 1.0f) * tan(parameter(_angle_variance, percept->info.class_id));
303  covariance(1,1) = covariance(0,0);
304  covariance(2,2) = parameter(_distance_variance, percept->info.class_id);
305 
306  // rotate covariance matrix depending on the position in the image
307  Eigen::Matrix3f rotation_camera_object;
308  rotation_camera_object << pose.getBasis()[0][0], pose.getBasis()[0][1], pose.getBasis()[0][2],
309  pose.getBasis()[1][0], pose.getBasis()[1][1], pose.getBasis()[1][2],
310  pose.getBasis()[2][0], pose.getBasis()[2][1], pose.getBasis()[2][2];
311  covariance = rotation_camera_object * covariance * rotation_camera_object.transpose();
312 
313  // fill posePercept
314  tf::poseTFToMsg(pose, posePercept->pose.pose);
315  // tf::quaternionTFToMsg(direction, posePercept->pose.pose.orientation);
// Only the upper-left 3x3 (position part) of the row-major 6x6 covariance is
// filled: row offsets 0, 6, 12.
316  posePercept->pose.covariance[0] = covariance(0,0);
317  posePercept->pose.covariance[1] = covariance(0,1);
318  posePercept->pose.covariance[2] = covariance(0,2);
319  posePercept->pose.covariance[6] = covariance(1,0);
320  posePercept->pose.covariance[7] = covariance(1,1);
321  posePercept->pose.covariance[8] = covariance(1,2);
322  posePercept->pose.covariance[12] = covariance(2,0);
323  posePercept->pose.covariance[13] = covariance(2,1);
324  posePercept->pose.covariance[14] = covariance(2,2);
325 
326  // forward to posePercept callback
327  posePerceptCb(posePercept);
328 }
329 
330 void ObjectTracker::posePerceptCb(const hector_worldmodel_msgs::PosePerceptConstPtr &percept)
// Core tracking callback: verifies the percept against configured services,
// transforms it into the target (_frame_id) frame, filters by height, derives
// an orientation from wall normals if a GetNormal service is configured,
// associates the percept with an existing object (or creates one), updates
// support/state, runs object verification, and publishes the updated model.
// NOTE(review): this Doxygen listing drops original lines 336/340 and 601/605
// — the 'if (perceptPoseDebugPublisher...)' / 'if (objectPoseDebugPublisher...)'
// guards and publish calls; the stray '}' lines below close those guards.
331 {
332 
333  Parameters::load(percept->info.class_id);
334 
335  // publish pose in source frame for debugging purposes
337  geometry_msgs::PoseStamped pose;
338  pose.pose = percept->pose.pose;
339  pose.header = percept->header;
341  }
342 
343  // call percept verification
// CONFIRM adds +100 support below; DISCARD drops the percept immediately; an
// unreachable "required" service also drops it.
344  float support_added_by_percept_verification = 0.0;
345  ServiceClientsWithProperties &percept_verification_services = parameter(_percept_verification_services, percept->info.class_id);
346  if (!percept_verification_services.empty()) {
347  hector_worldmodel_msgs::VerifyPercept::Request request;
348  hector_worldmodel_msgs::VerifyPercept::Response response;
349 
350  request.percept = *percept;
351 
352  for(ServiceClientsWithProperties::iterator it = percept_verification_services.begin(); it != percept_verification_services.end(); ++it) {
353  if (!(it->first) || !(it->first->isValid())) continue;
354 
355  if (it->second.hasMember("ignore") && it->second["ignore"]) {
356  ROS_DEBUG("Calling service %s for percept of class '%s'', but ignoring its answer...", it->first->getService().c_str(), percept->info.class_id.c_str());
357  it->first->call(request, response);
358 
359  } else if (it->first->call(request, response)) {
360  if (response.response == response.DISCARD) {
361  ROS_DEBUG("Discarded percept of class '%s' due to DISCARD message from service %s", percept->info.class_id.c_str(), it->first->getService().c_str());
362  return;
363  }
364  if (response.response == response.CONFIRM) {
365  ROS_DEBUG("We got a CONFIRMation for percept of class '%s' from service %s!", percept->info.class_id.c_str(), it->first->getService().c_str());
366  support_added_by_percept_verification = 100.0;
367  }
368  if (response.response == response.UNKNOWN) {
369  ROS_DEBUG("Verification service %s cannot help us with percept of class %s at the moment :-(", it->first->getService().c_str(), percept->info.class_id.c_str());
370  }
371  } else if (it->second.hasMember("required") && it->second["required"]) {
372  ROS_DEBUG("Discarded percept of class '%s' as required service %s is not available", percept->info.class_id.c_str(), it->first->getService().c_str());
373  return;
374  }
375  }
376  }
377 
378  // convert pose in tf
379  tf::Pose pose;
380  tf::poseMsgToTF(percept->pose.pose, pose);
381 
382  // retrieve distance information
383 // float distance = pose.getOrigin().length();
384 // if (parameter(_project_objects, percept->info.class_id)) {
385 // hector_nav_msgs::GetDistanceToObstacle::Request request;
386 // hector_nav_msgs::GetDistanceToObstacle::Response response;
387 
388 // // project image percept to the next obstacle
389 // request.point.header = percept->header;
390 // tf::pointTFToMsg(pose.getOrigin(), request.point.point);
391 // if (parameter(_distance_to_obstacle_service, percept->info.class_id).call(request, response) && response.distance > 0.0) {
392 // // distance = std::max(response.distance - 0.1f, 0.0f);
393 // distance = std::max(response.distance, 0.0f);
394 // pose.setOrigin(pose.getOrigin().normalized() * distance);
395 // ROS_DEBUG("Projected percept to a distance of %.1f m", distance);
396 // } else {
397 // ROS_DEBUG("Ignoring percept due to unknown or infinite distance");
398 // return;
399 // }
400 // }
401 
402  // extract variance matrix
// Only the upper-left 3x3 (position) block of the 6x6 covariance is used.
403  Eigen::Matrix<float,6,6> temp;
404  for(unsigned int i = 0; i < 36; ++i) temp(i) = percept->pose.covariance[i];
405  Eigen::Matrix3f covariance = temp.block<3,3>(0,0);
406 
407  // if no variance is given, set variance to default
408  if (covariance.isZero()) {
409  covariance(0,0) = parameter(_distance_variance, percept->info.class_id);
410  covariance(1,1) = parameter(_distance_variance, percept->info.class_id);
411  covariance(2,2) = parameter(_distance_variance, percept->info.class_id);
412  }
413 
414  // project percept coordinates to map frame
415  tf::StampedTransform cameraTransform;
416  if (!_frame_id.empty() && tf.resolve(percept->header.frame_id) != tf.resolve(_frame_id)) {
417  ROS_DEBUG("Transforming percept from %s frame to %s frame", percept->header.frame_id.c_str(), _frame_id.c_str());
418 
419  // retrieve camera transformation from tf
420  try {
421  tf.waitForTransform(_frame_id, percept->header.frame_id, percept->header.stamp, ros::Duration(1.0));
422  tf.lookupTransform(_frame_id, percept->header.frame_id, percept->header.stamp, cameraTransform);
423  } catch (tf::TransformException& ex) {
424  ROS_ERROR("%s", ex.what());
425  return;
426  }
427 
428  pose = cameraTransform * pose;
429 
430  // rotate covariance matrix to map coordinates
431  Eigen::Matrix3f rotation_map_camera;
432  rotation_map_camera << cameraTransform.getBasis()[0][0], cameraTransform.getBasis()[0][1], cameraTransform.getBasis()[0][2],
433  cameraTransform.getBasis()[1][0], cameraTransform.getBasis()[1][1], cameraTransform.getBasis()[1][2],
434  cameraTransform.getBasis()[2][0], cameraTransform.getBasis()[2][1], cameraTransform.getBasis()[2][2];
435  covariance = rotation_map_camera * covariance * rotation_map_camera.transpose();
436  }
437 
438  // check height
// NOTE(review): when the percept already arrives in _frame_id, the branch
// above is skipped and cameraTransform stays default-constructed — its origin
// is then used here; confirm tf::StampedTransform default-initializes, else
// relative_height is undefined in that case.
439  float relative_height = pose.getOrigin().z() - cameraTransform.getOrigin().z();
440  if (relative_height < parameter(_min_height, percept->info.class_id) || relative_height > parameter(_max_height, percept->info.class_id)) {
441  ROS_INFO("Discarding %s percept with height %f", percept->info.class_id.c_str(), relative_height);
442  return;
443  }
444 
445  // get object orientation from normal to the wall
446  ros::ServiceClientPtr get_normal_client = parameter(_get_normal_service, percept->info.class_id);
447  if (get_normal_client) {
448  // Get normal at object's position
// NOTE(review): frame is hard-coded to "map" rather than _frame_id here.
449  hector_nav_msgs::GetNormal get_normal;
450  get_normal.request.point.point.x = pose.getOrigin().x();
451  get_normal.request.point.point.y = pose.getOrigin().y();
452  get_normal.request.point.point.z = pose.getOrigin().z();
453  get_normal.request.point.header.frame_id = "map";
454  get_normal.request.point.header.stamp = percept->header.stamp;
455 
456  if (get_normal_client->call(get_normal.request, get_normal.response)) {
457  tf::Vector3 normal(get_normal.response.normal.x, get_normal.response.normal.y, get_normal.response.normal.z);
458  normal.normalize(); // we don't trust the server :-)
459 
460  // invert normal if it points away from the percept's x-axis
461  if (pose.getBasis().getRow(0) * normal < 0) {
462  normal = -normal;
463  }
464 
465  {
466  // set rotation of object so that the x-axis points in the direction of the normal and y-axis is parallel to the x-y-plane
467  const tf::Point &n(normal); // for readability
// Degenerate cases: (near-)vertical normals, where 1/sqrt(1-z^2) blows up.
468  if (n.z() >= 0.999) {
469  pose.setBasis(tf::Matrix3x3(0, 0, -1., 0, 1., 0, 1., 0, 0));
470  } else if (n.z() <= -0.999) {
471  pose.setBasis(tf::Matrix3x3(0, 0, 1., 0, 1., 0, -1., 0, 0));
472  } else {
473  double c = 1./sqrt(1. - n.z()*n.z());
474  pose.setBasis(tf::Matrix3x3(n.x(), -c*n.y(), -c*n.x()*n.z(),
475  n.y(), c*n.x(), -c*n.y()*n.z(),
476  n.z(), 0, 1./c));
477  }
478  }
479 
480  tfScalar r,p,y;
481  pose.getBasis().getRPY(r, p, y);
482  ROS_DEBUG("Object orientation was updated from wall normals: %f", y * 180./M_PI);
483 
484  } else {
485  tfScalar r,p,y;
486  pose.getBasis().getRPY(r, p, y);
487  ROS_DEBUG("View angle was used as object orientation: %f", y * 180./M_PI);
488  }
489  }
490 
491  // fix height (assume camera is always at 0.475m)
492  // pose.setOrigin(tf::Point(pose.getOrigin().x(), pose.getOrigin().y(), pose.getOrigin().z() - cameraTransform.getOrigin().z() + 0.475f));
493 
494  // calculate observation support
// A percept bound to a known object_id uses object_support; otherwise the
// class support plus any verification bonus.
495  float support = 0.0;
496  if (!percept->info.object_id.empty()) {
497  support = percept->info.object_support;
498  } else if (!percept->info.class_id.empty()) {
499  support = percept->info.class_support + support_added_by_percept_verification;
500  }
501 
502  if (support == 0.0) {
503  ROS_WARN("Ignoring percept with support == 0.0");
504  return;
505  }
506 
507  // lock model
508  model.lock();
509 
510  // find correspondence
511  ObjectPtr object;
512  if (percept->info.object_id.empty()) {
513  model.getBestCorrespondence(object, pose, covariance, percept->info.class_id, percept->info.name, 1.0f);
514  } else {
515  object = model.getObject(percept->info.object_id);
516  }
517 
// Negative states mark objects whose state is fixed; leave them untouched.
518  if (object && object->getState() < 0) {
519  ROS_DEBUG("Percept was associated to object %s, which has a fixed state", object->getObjectId().c_str());
520  model.unlock();
521  return;
522  }
523 
524  // create new object
525  if (!object) {
526  object = model.add(percept->info.class_id, percept->info.object_id);
527 
528  object->setPose(pose);
529  object->setCovariance(covariance);
530  object->setSupport(support);
531 
532  ROS_INFO("Found new object %s of class %s at (%f,%f)!", object->getObjectId().c_str(), object->getClassId().c_str(), pose.getOrigin().getX(), pose.getOrigin().getY());
533 
534  // or update existing object
535  } else if (support > 0.0) {
536  //object->update(pose, covariance, support);
537  object->intersect(pose, covariance, support);
538 
539  // or simply decrease support
540  } else {
541  object->addSupport(support);
542  }
543 
544  // update object state
// State machine: UNKNOWN/INACTIVE -> PENDING -> ACTIVE, each gated by a
// per-class support threshold and a minimum dwell time since the last update.
545  if ((object->getState() == ObjectState::UNKNOWN || object->getState() == ObjectState::INACTIVE) && parameter(_pending_support, percept->info.class_id) > 0) {
546  if (object->getSupport() >= parameter(_pending_support, percept->info.class_id) && (percept->header.stamp - object->getHeader().stamp).toSec() >= parameter(_pending_time, percept->info.class_id)) {
547  object->setState(ObjectState::PENDING);
548  }
549  }
550  if (object->getState() == ObjectState::PENDING && parameter(_active_support, percept->info.class_id) > 0) {
551  if (object->getSupport() >= parameter(_active_support, percept->info.class_id) && (percept->header.stamp - object->getHeader().stamp).toSec() >= parameter(_active_time, percept->info.class_id)) {
552  object->setState(ObjectState::ACTIVE);
553  }
554  }
555 
556  // update object header
557  std_msgs::Header header;
558  header.stamp = percept->header.stamp;
559  header.frame_id = _frame_id;
560  object->setHeader(header);
561 
562  // update object name
563  if (!percept->info.name.empty()) object->setName(percept->info.name);
564 
565  // unlock model
566  model.unlock();
567 
568  // call object verification
// Same protocol as percept verification, but DISCARD/unavailable-required set
// the object's state to DISCARDED instead of returning early.
569  ServiceClientsWithProperties &object_verification_services = parameter(_object_verification_services, percept->info.class_id);
570  if (!object_verification_services.empty()) {
571  hector_worldmodel_msgs::VerifyObject::Request request;
572  hector_worldmodel_msgs::VerifyObject::Response response;
573 
574  object->getMessage(request.object);
575 
576  for(ServiceClientsWithProperties::iterator it = object_verification_services.begin(); it != object_verification_services.end(); ++it) {
577  if (it->second.hasMember("ignore") && it->second["ignore"]) {
578  ROS_DEBUG("Calling service %s for object %s, but ignoring its answer...", it->first->getService().c_str(), object->getObjectId().c_str());
579  it->first->call(request, response);
580 
581  } else if (it->first->call(request, response)) {
582  if (response.response == response.DISCARD) {
583  ROS_DEBUG("Discarded object %s due to DISCARD message from service %s", object->getObjectId().c_str(), it->first->getService().c_str());
584  object->setState(ObjectState::DISCARDED);
585  }
586  if (response.response == response.CONFIRM) {
587  ROS_DEBUG("We got a CONFIRMation for object %s from service %s!", object->getObjectId().c_str(), it->first->getService().c_str());
588  object->addSupport(100.0);
589  }
590  if (response.response == response.UNKNOWN) {
591  ROS_DEBUG("Verification service %s cannot help us with object %s at the moment :-(", it->first->getService().c_str(), object->getObjectId().c_str());
592  }
593  } else if (it->second.hasMember("required") && it->second["required"]) {
594  ROS_DEBUG("Discarded object %s as required service %s is not available", object->getObjectId().c_str(), it->first->getService().c_str());
595  object->setState(ObjectState::DISCARDED);
596  }
597  }
598  }
599 
600  // publish pose in target frame for debugging purposes
// 'pose' here shadows the tf::Pose above — it lives inside the debug-publisher
// guard opened on dropped line 601.
602  geometry_msgs::PoseStamped pose;
603  object->getPose(pose.pose);
604  pose.header = object->getHeader();
606  }
607 
608  modelUpdatePublisher.publish(object->getMessage());
609  publishModel();
610 }
611 
612 ObjectTracker::CameraModelPtr ObjectTracker::getCameraModel(const std::string& frame_id, const sensor_msgs::CameraInfo& camera_info) {
613  // retrieve camera model from either the cache or from CameraInfo given in the percept
614  CameraModelPtr cameraModel;
615  if (cameraModels.count(frame_id) == 0) {
616  cameraModel.reset(new image_geometry::PinholeCameraModel());
617  if (!cameraModel->fromCameraInfo(camera_info)) {
618  ROS_ERROR("Could not initialize camera model from CameraInfo given in the percept");
619  return CameraModelPtr();
620  }
621  cameraModels[frame_id] = cameraModel;
622  } else {
623  cameraModel = cameraModels[frame_id];
624  }
625 
626  return cameraModel;
627 }
628 
629 void ObjectTracker::objectAgeingCb(const std_msgs::Float32ConstPtr &ageing) {
630  ROS_DEBUG("ageing of all objects by %f", ageing->data);
631 
632  // lock model
633  model.lock();
634 
635  ObjectList objects = model.getObjects();
636 
637  for(ObjectModel::iterator it = objects.begin(); it != objects.end();) {
638  ObjectPtr object = *it;
639 
640  // update support
641  object->setSupport(object->getSupport() - ageing->data);
642 
643  // remove the object if the support is to low
644  if (object->getSupport() < _ageing_threshold) {
645  ROS_INFO("remove object %s with support %f", object->getObjectId().c_str(), object->getSupport());
646  it = objects.erase(it);
647  model.remove(object);
648  } else {
649  it++;
650  }
651  }
652 
653  // unlock model
654  model.unlock();
655  publishModel();
656 }
657 
658 void ObjectTracker::modelUpdateCb(const hector_worldmodel_msgs::ObjectModelConstPtr &update)
659 {
660  for(hector_worldmodel_msgs::ObjectModel::_objects_type::const_iterator it = update->objects.begin(); it != update->objects.end(); ++it)
661  {
662  hector_worldmodel_msgs::AddObject object;
663  object.request.map_to_next_obstacle = false;
664  object.request.object = *it;
665  if (!addObjectCb(object.request, object.response)) {
666  ROS_WARN("Could not update object %s", it->info.object_id.c_str());
667  }
668  }
669 }
670 
671 bool ObjectTracker::setObjectStateCb(hector_worldmodel_msgs::SetObjectState::Request& request, hector_worldmodel_msgs::SetObjectState::Response& response) {
672  model.lock();
673 
674  ObjectPtr object = model.getObject(request.object_id);
675  if (!object) {
676  model.unlock();
677  return false;
678  }
679 
680  object->setState(request.new_state.state);
681  modelUpdatePublisher.publish(object->getMessage());
682 
683  // check minimum distance between objects and discard objects which are closer
684  double min_distance_between_objects = parameter(_min_distance_between_objects, object->getClassId());
685  if ((min_distance_between_objects > 0.0) && (request.new_state.state == hector_worldmodel_msgs::ObjectState::CONFIRMED)) {
686  for(ObjectModel::iterator it = model.begin(); it != model.end(); ++it) {
687  ObjectPtr other = *it;
688  if (other == object) continue;
689  if (other->getClassId() != object->getClassId()) continue;
690 
691  if (other->getDistance(*object) < min_distance_between_objects) {
692  other->setState(hector_worldmodel_msgs::ObjectState::DISCARDED);
693  modelUpdatePublisher.publish(other->getMessage());
694  }
695  }
696  }
697 
698  model.unlock();
699  publishModel();
700  return true;
701 }
702 
703 bool ObjectTracker::setObjectNameCb(hector_worldmodel_msgs::SetObjectName::Request& request, hector_worldmodel_msgs::SetObjectName::Response& response) {
704  model.lock();
705 
706  ObjectPtr object = model.getObject(request.object_id);
707  if (!object) {
708  model.unlock();
709  return false;
710  }
711 
712  object->setName(request.name);
713  modelUpdatePublisher.publish(object->getMessage());
714 
715  model.unlock();
716  publishModel();
717  return true;
718 }
719 
bool ObjectTracker::addObjectCb(hector_worldmodel_msgs::AddObject::Request& request, hector_worldmodel_msgs::AddObject::Response& response) {
  // Service callback: insert a new object into the model, or update an
  // existing one when request.object.info.object_id matches a known object.
  // Returns false if obstacle mapping or the tf transform into the tracker
  // frame fails; the model is left untouched in that case.
  ObjectPtr object;
  bool newObject = false;

  // check if object already exist
  if (!request.object.info.object_id.empty()) {
    ROS_INFO("add_object service called for known %s object %s in frame %s", request.object.info.class_id.c_str(), request.object.info.object_id.c_str(), request.object.header.frame_id.c_str());
    object = model.getObject(request.object.info.object_id);
  } else {
    ROS_INFO("add_object service called for new %s object in frame %s", request.object.info.class_id.c_str(), request.object.header.frame_id.c_str());
  }

  // create a new object if object does not exist
  // (also reached when a non-empty object_id was given but not found)
  if (!object) {
    object.reset(new Object(request.object.info.class_id, request.object.info.object_id));
    newObject = true;
  }

  // stamp defaults to "now" when the caller left it unset
  std_msgs::Header header = request.object.header;
  if (header.stamp.isZero()) header.stamp = ros::Time::now();

  geometry_msgs::PoseWithCovariance pose;
  if (request.map_to_next_obstacle) {
    // keep the caller's covariance but project the position onto the next
    // obstacle reported by the distance service
    pose.covariance = request.object.pose.covariance;
    if (!mapToNextObstacle(request.object.pose.pose, header, request.object.info, pose.pose)) {
      return false;
    }
  } else {
    pose = request.object.pose;
  }

  // if no variance is given, set variance to default
  // (indices 0/7/14/21/28/35 are the diagonal of the row-major 6x6 matrix:
  //  x, y, z, rot_x, rot_y, rot_z)
  if (pose.covariance[0] == 0 && pose.covariance[7] == 0 && pose.covariance[14] == 0 &&
      pose.covariance[21] == 0 && pose.covariance[28] == 0 && pose.covariance[35] == 0) {
    pose.covariance.assign(0.0);
    pose.covariance[0] = 1.0;
    pose.covariance[7] = 1.0;
    pose.covariance[14] = 1.0;
  }

  // transform into the tracker frame (_frame_id); transformPose rewrites
  // header.frame_id accordingly
  if (!transformPose(pose, pose, header)) return false;

  model.lock();

  object->setHeader(header);
  object->setPose(pose);
  object->setState(request.object.state.state);
  object->setSupport(request.object.info.support);

  if (newObject) model.add(object);
  object->getMessage(response.object);
  modelUpdatePublisher.publish(response.object);

  model.unlock();

  publishModel();
  return true;
}
778 
779 bool ObjectTracker::getObjectModelCb(hector_worldmodel_msgs::GetObjectModel::Request& request, hector_worldmodel_msgs::GetObjectModel::Response& response) {
780  getMergedModel().getMessage(response.model);
781  return true;
782 }
783 
784 void ObjectTracker::mergeModelCallback(const hector_worldmodel_msgs::ObjectModelConstPtr &new_model, const MergedModelPtr& info)
785 {
786  info->model = *new_model;
787  publishModel();
788 }
789 
790 void ObjectTracker::negativeUpdateCallback(const sensor_msgs::CameraInfoConstPtr &camera_info, const NegativeUpdatePtr& info)
791 {
792  model.lock();
793 
794  // retrieve camera model from either the cache or from CameraInfo given in the percept
795  CameraModelPtr cameraModel = getCameraModel(camera_info->header.frame_id, *camera_info);
796 
797  // get camera transform
798  tf::StampedTransform cameraInverseTransform;
799  try {
800  tf.waitForTransform(camera_info->header.frame_id, _frame_id, camera_info->header.stamp, ros::Duration(1.0));
801  tf.lookupTransform(camera_info->header.frame_id, _frame_id, camera_info->header.stamp, cameraInverseTransform);
802  } catch (tf::TransformException& ex) {
803  ROS_ERROR("%s", ex.what());
804  return;
805  }
806 
807  ROS_DEBUG_NAMED("negative_update", "Doing negative update for frame_id %s", camera_info->header.frame_id.c_str());
808 
809  // iterate through objects
810  for(ObjectModel::iterator it = model.begin(); it != model.end(); ++it) {
811  const ObjectPtr& object = *it;
812 
813  // do not update objects with state < 0
814  if (object->getState() < 0) continue;
815 
816  // check class_id
817  if (!info->class_id.empty() && info->class_id != object->getClassId()) {
818  ROS_DEBUG_NAMED("negative_update", "%s: wrong class_id %s", object->getObjectId().c_str(), object->getClassId().c_str());
819  continue;
820  }
821 
822  // check last seen stamp
823  if (object->getStamp() >= camera_info->header.stamp - info->not_seen_duration) {
824  ROS_DEBUG_NAMED("negative_update", "%s: seen %f seconds ago", object->getObjectId().c_str(), (camera_info->header.stamp - object->getStamp()).toSec());
825  continue;
826  }
827 
828  // transform object pose to camera coordinates
829  tf::Pose pose;
830  object->getPose(pose);
831  pose = cameraInverseTransform * pose;
832  cv::Point3d xyz(pose.getOrigin().x(), pose.getOrigin().y(), pose.getOrigin().z());
833 
834  // check distance
835  float distance = pose.getOrigin().length();
836  if (distance < info->min_distance || (info->max_distance > 0.0 && distance > info->max_distance)) {
837  ROS_DEBUG_NAMED("negative_update", "%s: wrong distance: %f", object->getObjectId().c_str(), distance);
838  continue;
839  }
840 
841  // get image point
842  cv::Point2d point = cameraModel->project3dToPixel(xyz);
843 
844  // check if object is within field of view
845  if (!(point.x > 0 + info->ignore_border_pixels &&
846  point.x < camera_info->width - info->ignore_border_pixels &&
847  point.y > 0 + info->ignore_border_pixels &&
848  point.y < camera_info->height - info->ignore_border_pixels)) {
849  ROS_DEBUG_NAMED("negative_update", "%s: not within field of view (image coordinates (%f,%f))", object->getObjectId().c_str(), point.x, point.y);
850  continue;
851  }
852 
853  // ==> do negative update
854  ROS_DEBUG("Doing negative update of %s. Should be at image coordinates (%f,%f).", object->getObjectId().c_str(), point.x, point.y);
855  object->addSupport(-info->negative_support);
856  if (object->getSupport() < info->min_support) object->setSupport(info->min_support);
857  if (object->getSupport() <= parameter(_inactive_support, info->class_id)) {
858  if (object->getState() == ObjectState::PENDING || object->getState() == ObjectState::ACTIVE)
859  object->setState(ObjectState::INACTIVE);
860  }
861 
862  // publish object update
863  modelUpdatePublisher.publish(object->getMessage());
864  }
865 
866  model.unlock();
867  // publishModel();
868 }
869 
870 bool ObjectTracker::mapToNextObstacle(const geometry_msgs::Pose& source, const std_msgs::Header &header, const ObjectInfo &info, geometry_msgs::Pose &mapped) {
871  Parameters::load(info.class_id);
873 
874  if (!client || !client->exists()) {
875  ROS_ERROR("Could not map object to next obstacle as the distance service %s is not available", client->getService().c_str());
876  return false;
877  }
878 
879  // retrieve distance information
880  float distance = parameter(_default_distance, info.class_id);
881  hector_nav_msgs::GetDistanceToObstacle::Request request;
882  hector_nav_msgs::GetDistanceToObstacle::Response response;
883 
884  // project image percept to the next obstacle
885  request.point.header = header;
886  request.point.point = source.position;
887  // tf::pointTFToMsg(cameraTransform.getOrigin(), request.pose.pose.position);
888  // tf::Quaternion direction_quaternion = tf::Quaternion(atan(direction.y/direction.x), atan(direction.z/direction.x), 0.0);
889  // direction_quaternion *= cameraTransform.getRotation();
890  // tf::quaternionTFToMsg(direction_quaternion, request.pose.pose.orientation);
891  if (client->call(request, response) && response.distance > 0.0) {
892  // distance = std::max(response.distance - 0.1f, 0.0f);
893  distance = std::max(response.distance, 0.0f);
894  } else {
895  ROS_DEBUG("Could not map object to next obstacle due to unknown or infinite distance");
896  return false;
897  }
898 
899  tf::Pose sourceTF;
900  tf::poseMsgToTF(source, sourceTF);
901  sourceTF.setOrigin(sourceTF.getOrigin().normalized() * distance);
902  tf::poseTFToMsg(sourceTF, mapped);
903 
904  return true;
905 }
906 
907 bool ObjectTracker::transformPose(const geometry_msgs::Pose& from, geometry_msgs::Pose &to, std_msgs::Header &header, tf::StampedTransform *transform_ptr) {
908  // retrieve transformation from tf
909  tf::StampedTransform transform;
910  try {
911  tf.waitForTransform(_frame_id, header.frame_id, header.stamp, ros::Duration(1.0));
912  tf.lookupTransform(_frame_id, header.frame_id, header.stamp, transform);
913  } catch (tf::TransformException& ex) {
914  ROS_ERROR("%s", ex.what());
915  return false;
916  }
917 
918  tf::Pose tfPose;
919  tf::poseMsgToTF(from, tfPose);
920  tfPose = transform * tfPose;
921  tf::poseTFToMsg(tfPose, to);
922 
923  header.frame_id = _frame_id;
924  if (transform_ptr) *transform_ptr = transform;
925 
926  return true;
927 }
928 
929 bool ObjectTracker::transformPose(const geometry_msgs::PoseWithCovariance& from, geometry_msgs::PoseWithCovariance &to, std_msgs::Header &header) {
930  tf::StampedTransform transform;
931 
932  if (!transformPose(from.pose, to.pose, header, &transform)) return false;
933 
934  // TODO
935  // rotate covariance matrix
936 
937  return true;
938 }
939 
941 {
942  if (merged_models.size() == 0) return model;
943 
944  ROS_DEBUG("Merging object models from %lu sources.", merged_models.size());
945 
946  // merge with other models from merged_models
947  ObjectModel merged(model);
948  for(std::vector<MergedModelPtr>::iterator it = merged_models.begin(); it != merged_models.end(); ++it)
949  {
950  merged.mergeWith((*it)->model, tf, (*it)->prefix);
951  }
952 
953  return merged;
954 }
955 
957  publishModel();
958 }
959 
961  ObjectModel merged_model = getMergedModel();
962 
963  // Publish all model data on topic /objects
964  modelPublisher.publish(merged_model.getMessage());
965 
966  // Visualize objects and covariance in rviz
967  visualization_msgs::MarkerArray markers;
968  merged_model.getVisualization(markers);
969 // drawings.setTime(ros::Time::now());
970 // model.lock();
971 // for(ObjectModel::iterator it = model.begin(); it != model.end(); ++it) {
972 // ObjectPtr object = *it;
973 // drawings.addMarkers(object->getVisualization());
974 // drawings.setColor(1.0, 0.0, 0.0, drawings.markerArray.markers.back().color.a);
975 // drawings.drawCovariance(Eigen::Vector2f(object->getPosition().x(), object->getPosition().y()), object->getCovariance().block<2,2>(0,0));
976 // }
977 // model.unlock();
978  drawings.addMarkers(markers);
980 }
981 
982 } // namespace hector_object_tracker
983 
984 
985 int main(int argc, char **argv)
986 {
987  ros::init(argc, argv, "object_tracker");
988 
990  ros::spin();
991 
992  exit(0);
993 }
std::map< std::string, double > _inactive_support
Definition: parameters.cpp:51
void getVisualization(visualization_msgs::MarkerArray &markers) const
d
std::map< std::string, double > _pending_support
Definition: parameters.cpp:47
std::map< std::string, ros::ServiceClientPtr > _distance_to_obstacle_service
Definition: parameters.cpp:55
ServiceClient serviceClient(const std::string &service_name, bool persistent=false, const M_string &header_values=M_string())
bool transformPose(const geometry_msgs::Pose &from, geometry_msgs::Pose &to, std_msgs::Header &header, tf::StampedTransform *transform=0)
std::map< std::string, double > _inactive_time
Definition: parameters.cpp:52
static void poseMsgToTF(const geometry_msgs::Pose &msg, Pose &bt)
boost::shared_ptr< void const > VoidConstPtr
std::map< std::string, double > _min_height
Definition: parameters.cpp:45
void remove(ObjectPtr object)
Definition: ObjectModel.cpp:95
void publish(const boost::shared_ptr< M > &message) const
void publishModelEvent(const ros::TimerEvent &)
virtual void sendAndResetData()
f
double tfScalar
std::map< std::string, double > _active_time
Definition: parameters.cpp:50
Subscriber subscribe(const std::string &topic, uint32_t queue_size, void(T::*fp)(M), T *obj, const TransportHints &transport_hints=TransportHints())
std::map< std::string, double > _pending_time
Definition: parameters.cpp:48
TFSIMD_FORCE_INLINE Matrix3x3 & getBasis()
std::vector< ServiceClientWithProperties > ServiceClientsWithProperties
Definition: parameters.h:60
int size() const
std::map< std::string, double > _min_distance_between_objects
Definition: parameters.cpp:53
void imagePerceptCb(const hector_worldmodel_msgs::ImagePerceptConstPtr &)
ROSCPP_DECL void init(int &argc, char **argv, const std::string &name, uint32_t options=0)
std::map< std::string, ros::ServiceClientPtr > _get_normal_service
Definition: parameters.cpp:56
std::map< std::string, CameraModelPtr > cameraModels
std::map< std::string, std_msgs::ColorRGBA > _marker_color
Definition: parameters.cpp:54
bool setObjectNameCb(hector_worldmodel_msgs::SetObjectName::Request &request, hector_worldmodel_msgs::SetObjectName::Response &response)
TFSIMD_FORCE_INLINE const tfScalar & getY() const
TFSIMD_FORCE_INLINE tfScalar distance(const Vector3 &v) const
std::map< std::string, bool > _with_orientation
Definition: parameters.cpp:41
static void setNamespace(const std::string &ns)
Definition: Object.cpp:276
ServiceServer advertiseService(const std::string &service, bool(T::*srv_func)(MReq &, MRes &), T *obj)
std::map< std::string, double > _max_height
Definition: parameters.cpp:46
Type const & getType() const
TFSIMD_FORCE_INLINE const tfScalar & y() const
#define ROS_WARN(...)
std::vector< NegativeUpdatePtr > negativeUpdate
ROSCPP_DECL void spin(Spinner &spinner)
void sysCommandCb(const std_msgs::StringConstPtr &)
TFSIMD_FORCE_INLINE const Vector3 & getRow(int i) const
std::vector< MergedModelPtr > merged_models
ObjectList::iterator iterator
Definition: ObjectModel.h:22
boost::shared_ptr< image_geometry::PinholeCameraModel > CameraModelPtr
std::list< ObjectPtr > ObjectList
Definition: types.h:46
TFSIMD_FORCE_INLINE const tfScalar & x() const
#define ROS_DEBUG_NAMED(name,...)
void modelUpdateCb(const hector_worldmodel_msgs::ObjectModelConstPtr &)
bool addObjectCb(hector_worldmodel_msgs::AddObject::Request &request, hector_worldmodel_msgs::AddObject::Response &response)
void negativeUpdateCallback(const sensor_msgs::CameraInfoConstPtr &, const NegativeUpdatePtr &info)
void getMessage(hector_worldmodel_msgs::ObjectModel &model) const
Definition: ObjectModel.cpp:62
void load(const std::string &class_id)
Definition: parameters.cpp:62
TFSIMD_FORCE_INLINE const tfScalar & z() const
void getRPY(tfScalar &roll, tfScalar &pitch, tfScalar &yaw, unsigned int solution_number=1) const
#define ROS_INFO(...)
options
std::map< std::string, bool > _project_objects
Definition: parameters.cpp:40
void mergeModelCallback(const hector_worldmodel_msgs::ObjectModelConstPtr &new_model, const MergedModelPtr &info)
bool param(const std::string &param_name, T &param_val, const T &default_val) const
std::map< std::string, double > _default_distance
Definition: parameters.cpp:42
ObjectTracker::CameraModelPtr getCameraModel(const std::string &frame_id, const sensor_msgs::CameraInfo &camera_info)
TFSIMD_FORCE_INLINE Vector3 normalized() const
std::map< std::string, ServiceClientsWithProperties > _percept_verification_services
Definition: parameters.cpp:57
ROSCPP_DECL bool ok()
TFSIMD_FORCE_INLINE const tfScalar & y() const
void setFrameId(const std::string &frame_id)
Definition: ObjectModel.cpp:58
Timer createTimer(Rate r, Handler h, Obj o, bool oneshot=false, bool autostart=true) const
int main(int argc, char **argv)
float getBestCorrespondence(ObjectPtr &object, const tf::Pose &pose, const Eigen::Matrix3f &covariance, const std::string &class_id, const std::string &name, float max_distance=0.0) const
Publisher advertise(const std::string &topic, uint32_t queue_size, bool latch=false)
std::map< std::string, double > _active_support
Definition: parameters.cpp:49
std::map< std::string, double > _angle_variance
Definition: parameters.cpp:44
TFSIMD_FORCE_INLINE void setBasis(const Matrix3x3 &basis)
bool mapToNextObstacle(const geometry_msgs::Pose &source, const std_msgs::Header &header, const ObjectInfo &info, geometry_msgs::Pose &mapped)
TFSIMD_FORCE_INLINE Vector3 & getOrigin()
TFSIMD_FORCE_INLINE Vector3 & normalize()
static T & parameter(std::map< std::string, T > &p, const std::string &class_id=std::string())
Definition: parameters.h:68
bool hasMember(const std::string &name) const
virtual void addMarkers(visualization_msgs::MarkerArray markers)
void posePerceptCb(const hector_worldmodel_msgs::PosePerceptConstPtr &)
uint32_t getNumSubscribers() const
static void poseTFToMsg(const Pose &bt, geometry_msgs::Pose &msg)
bool getParam(const std::string &key, std::string &s) const
TFSIMD_FORCE_INLINE const tfScalar & getX() const
static Time now()
std::map< std::string, double > _distance_variance
Definition: parameters.cpp:43
ObjectPtr getObject(const std::string &object_id) const
Definition: ObjectModel.cpp:38
Matrix3x3 transpose() const
ObjectPtr add(const std::string &class_id="", const std::string &object_id="")
Definition: ObjectModel.cpp:86
void objectAgeingCb(const std_msgs::Float32ConstPtr &)
bool setObjectStateCb(hector_worldmodel_msgs::SetObjectState::Request &request, hector_worldmodel_msgs::SetObjectState::Response &response)
std::map< std::string, ServiceClientsWithProperties > _object_verification_services
Definition: parameters.cpp:58
#define ROS_ERROR(...)
TFSIMD_FORCE_INLINE void setOrigin(const Vector3 &origin)
bool getObjectModelCb(hector_worldmodel_msgs::GetObjectModel::Request &request, hector_worldmodel_msgs::GetObjectModel::Response &response)
#define ROS_DEBUG(...)
static void pointTFToMsg(const Point &bt_v, geometry_msgs::Point &msg_v)


hector_object_tracker
Author(s): Johannes Meyer
autogenerated on Mon Jun 10 2019 13:35:13