#include "object_tracker.h"

#include <hector_nav_msgs/GetDistanceToObstacle.h>
#include <hector_nav_msgs/GetNormal.h>
#include <hector_worldmodel_msgs/VerifyObject.h>
#include <hector_worldmodel_msgs/VerifyPercept.h>

#include <math.h>

#include "Object.h"
#include "parameters.h"

#include <boost/algorithm/string.hpp>
namespace hector_object_tracker {

ObjectTracker::ObjectTracker()
{
  ros::NodeHandle priv_nh("~");
  priv_nh.param("frame_id", _frame_id, std::string("map"));
  priv_nh.param("worldmodel_ns", _worldmodel_ns, std::string("worldmodel"));
  priv_nh.param("ageing_threshold", _ageing_threshold, 1.0);
  priv_nh.param("publish_interval", _publish_interval, 0.0);

  // defaults for the (optionally class-specific) tracker parameters
  parameter(_project_objects) = false;
  parameter(_with_orientation) = false;
  parameter(_default_distance) = 1.0;
  parameter(_distance_variance) = pow(1.0, 2);
  parameter(_angle_variance) = pow(10.0 * M_PI / 180.0, 2);
  parameter(_min_height) = -999.9;
  parameter(_max_height) = 999.9;
  parameter(_pending_support) = 0.0;
  parameter(_pending_time) = 0.0;
  parameter(_active_support) = 0.0;
  parameter(_active_time) = 0.0;
  parameter(_inactive_support) = 0.0;
  parameter(_inactive_time) = 0.0;
  parameter(_min_distance_between_objects) = 0.0;

  std_msgs::ColorRGBA default_color;
  default_color.r = 0.8; default_color.a = 1.0;
  parameter(_marker_color) = default_color;

  Parameters::load();
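  // Illustrative example only: the private parameters read above could be set
  // from a YAML/launch file roughly like this (values are made up):
  //
  //   frame_id: map
  //   worldmodel_ns: worldmodel
  //   ageing_threshold: 1.0
  //   publish_interval: 2.0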

  ros::NodeHandle worldmodel(_worldmodel_ns);
  imagePerceptSubscriber = worldmodel.subscribe("image_percept", 10, &ObjectTracker::imagePerceptCb, this);
  posePerceptSubscriber = worldmodel.subscribe("pose_percept", 10, &ObjectTracker::posePerceptCb, this);
  objectAgeingSubscriber = worldmodel.subscribe("object_ageing", 10, &ObjectTracker::objectAgeingCb, this);
  modelPublisher = worldmodel.advertise<hector_worldmodel_msgs::ObjectModel>("objects", 10, false);
  modelUpdatePublisher = worldmodel.advertise<hector_worldmodel_msgs::Object>("object", 10, false);
  modelUpdateSubscriber = worldmodel.subscribe<hector_worldmodel_msgs::ObjectModel>("update", 10, &ObjectTracker::modelUpdateCb, this);

  Object::setNamespace(_worldmodel_ns);
  model.setFrameId(_frame_id);

  sysCommandSubscriber = nh.subscribe("/syscommand", 10, &ObjectTracker::sysCommandCb, this);
  perceptPoseDebugPublisher = priv_nh.advertise<geometry_msgs::PoseStamped>("percept/pose", 10, false);
  objectPoseDebugPublisher = priv_nh.advertise<geometry_msgs::PoseStamped>("object/pose", 10, false);

  XmlRpc::XmlRpcValue verification_services;
  if (priv_nh.getParam("verification_services", verification_services) && verification_services.getType() == XmlRpc::XmlRpcValue::TypeArray) {
    for(int i = 0; i < verification_services.size(); ++i) {
      XmlRpc::XmlRpcValue item = verification_services[i];
      if (!item.hasMember("service")) {
        ROS_ERROR("Verification service %d could not be initialized: no service name given", i);
        continue;
      }
      if (!item.hasMember("type")) {
        ROS_ERROR("Verification service %d could not be initialized: no service type given", i);
        continue;
      }

      ros::ServiceClientPtr client(new ros::ServiceClient);
      if (item["type"] == "object") {
        *client = nh.serviceClient<hector_worldmodel_msgs::VerifyObject>(item["service"]);
      } else if (item["type"] == "percept") {
        *client = nh.serviceClient<hector_worldmodel_msgs::VerifyPercept>(item["service"]);
      }

      if (!client || !client->isValid()) continue;
      if (!client->exists()) {
        if (!item.hasMember("required") || !item["required"]) {
          ROS_WARN("Verification service %s is not (yet) there...", client->getService().c_str());
        } else {
          ROS_WARN("Required verification service %s is not available... waiting...", client->getService().c_str());
          while(ros::ok() && !client->waitForExistence(ros::Duration(1.0)));
        }
      }

      std::string class_id;
      if (item.hasMember("class_id")) class_id = std::string(item["class_id"]);

      if (item["type"] == "object") {
        parameter(_object_verification_services, class_id).push_back(std::make_pair(client, item));
        if (class_id.empty()) {
          ROS_INFO("Using object verification service %s for all object classes", client->getService().c_str());
        } else {
          ROS_INFO("Using object verification service %s for objects of class %s", client->getService().c_str(), class_id.c_str());
        }
      } else if (item["type"] == "percept") {
        parameter(_percept_verification_services, class_id).push_back(std::make_pair(client, item));
        if (class_id.empty()) {
          ROS_INFO("Using percept verification service %s for all percept classes", client->getService().c_str());
        } else {
          ROS_INFO("Using percept verification service %s for percepts of class %s", client->getService().c_str(), class_id.c_str());
        }
      }
    }
  }
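  // Illustrative example only: "verification_services" is parsed above as a
  // list of structs with "service", "type" and optional "class_id", "required"
  // and "ignore" members. A possible configuration (service names and class
  // ids are hypothetical):
  //
  //   verification_services:
  //     - service: /victim_verification/verify_percept
  //       type: percept
  //       class_id: victim
  //       required: false
  //     - service: /qr_verification/verify_object
  //       type: object
  //       ignore: true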

  setObjectState = worldmodel.advertiseService("set_object_state", &ObjectTracker::setObjectStateCb, this);
  setObjectName = worldmodel.advertiseService("set_object_name", &ObjectTracker::setObjectNameCb, this);
  addObject = worldmodel.advertiseService("add_object", &ObjectTracker::addObjectCb, this);
  getObjectModel = worldmodel.advertiseService("get_object_model", &ObjectTracker::getObjectModelCb, this);
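  // Illustrative example only: with the default "worldmodel" namespace the
  // services advertised above could be called from the command line roughly
  // like this (object id and name are hypothetical):
  //
  //   rosservice call /worldmodel/set_object_name '{object_id: "victim_0", name: "victim near door"}'
  //   rosservice call /worldmodel/get_object_model '{}'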

  XmlRpc::XmlRpcValue merge;
  if (priv_nh.getParam("merge", merge) && merge.getType() == XmlRpc::XmlRpcValue::TypeArray) {
    for(int i = 0; i < merge.size(); ++i) {
      const MergedModelPtr& info = *merged_models.insert(merged_models.end(), boost::make_shared<MergedModelInfo>());
      ros::SubscribeOptions options = ros::SubscribeOptions::create<hector_worldmodel_msgs::ObjectModel>(std::string(), 10, boost::bind(&ObjectTracker::mergeModelCallback, this, _1, info), ros::VoidConstPtr(), 0);
      if (merge[i].getType() == XmlRpc::XmlRpcValue::TypeStruct && merge[i].hasMember("topic")) {
        options.topic = static_cast<std::string>(merge[i]["topic"]);
        if (merge[i].hasMember("prefix")) info->prefix = static_cast<std::string>(merge[i]["prefix"]);
      } else if (merge[i].getType() == XmlRpc::XmlRpcValue::TypeString) {
        options.topic = static_cast<std::string>(merge[i]);
      }

      if (options.topic.empty()) {
        ROS_ERROR("Each entry in parameter merge must be either a string containing the topic to subscribe to or a struct with a topic member.");
        continue;
      }
      info->subscriber = nh.subscribe(options);

      ROS_INFO("Subscribed to object model %s.", options.topic.c_str());
    }
  }
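  // Illustrative example only: "merge" accepts plain topic strings or structs
  // with "topic" and optional "prefix" members (topic names are hypothetical):
  //
  //   merge:
  //     - /other_robot/worldmodel/objects
  //     - topic: /uav/worldmodel/objects
  //       prefix: uav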

  XmlRpc::XmlRpcValue negative_update;
  if (priv_nh.getParam("negative_update", negative_update) && negative_update.getType() == XmlRpc::XmlRpcValue::TypeArray) {
    for(int i = 0; i < negative_update.size(); ++i) {
      if (negative_update[i].getType() != XmlRpc::XmlRpcValue::TypeStruct) continue;
      const NegativeUpdatePtr& info = *negativeUpdate.insert(negativeUpdate.end(), boost::make_shared<NegativeUpdateInfo>());

      // defaults for the negative update settings
      info->negative_support = 0.0;
      info->min_support = 0.0;
      info->min_distance = 0.0;
      info->max_distance = 0.0;
      info->ignore_border_pixels = 0.0;
      info->not_seen_duration = ros::Duration(0.5);

      ros::SubscribeOptions options = ros::SubscribeOptions::create<sensor_msgs::CameraInfo>(std::string(), 10, boost::bind(&ObjectTracker::negativeUpdateCallback, this, _1, info), ros::VoidConstPtr(), 0);
      if (negative_update[i].hasMember("topic")) options.topic = static_cast<std::string>(negative_update[i]["topic"]);
      if (negative_update[i].hasMember("class_id")) info->class_id = static_cast<std::string>(negative_update[i]["class_id"]);
      if (negative_update[i].hasMember("negative_support")) info->negative_support = static_cast<double>(negative_update[i]["negative_support"]);
      if (negative_update[i].hasMember("min_support")) info->min_support = static_cast<double>(negative_update[i]["min_support"]);
      if (negative_update[i].hasMember("min_distance")) info->min_distance = static_cast<double>(negative_update[i]["min_distance"]);
      if (negative_update[i].hasMember("max_distance")) info->max_distance = static_cast<double>(negative_update[i]["max_distance"]);
      if (negative_update[i].hasMember("ignore_border_pixels")) info->ignore_border_pixels = static_cast<double>(negative_update[i]["ignore_border_pixels"]);
      if (negative_update[i].hasMember("not_seen_duration")) info->not_seen_duration = ros::Duration(static_cast<double>(negative_update[i]["not_seen_duration"]));

      if (options.topic.empty()) {
        ROS_ERROR("Each entry in parameter negative_update must have a camera_info topic to subscribe to.");
        continue;
      }
      info->subscriber = nh.subscribe(options);
    }
  }
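  // Illustrative example only: each "negative_update" entry subscribes to a
  // CameraInfo topic and lowers the support of objects that should have been
  // visible but were not re-detected. A possible entry (topic and values are
  // hypothetical):
  //
  //   negative_update:
  //     - topic: /camera/rgb/camera_info
  //       class_id: victim
  //       negative_support: 2.0
  //       min_support: 10.0
  //       min_distance: 0.5
  //       max_distance: 4.0
  //       ignore_border_pixels: 20
  //       not_seen_duration: 1.0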

  if (_publish_interval > 0.0) {
    publishTimer = nh.createTimer(ros::Duration(_publish_interval), &ObjectTracker::publishModelEvent, this, false, true);
  }
}

ObjectTracker::~ObjectTracker()
{}

void ObjectTracker::sysCommandCb(const std_msgs::StringConstPtr &sysCommand)
{
  if (sysCommand->data == "reset") {
    ROS_INFO("Resetting object model.");
    model.reset();
    drawings.reset();
    for(std::vector<MergedModelPtr>::const_iterator it = merged_models.begin(); it != merged_models.end(); ++it) {
      (*it)->model.reset();
    }
  }
}

void ObjectTracker::imagePerceptCb(const hector_worldmodel_msgs::ImagePerceptConstPtr &percept)
{
  hector_worldmodel_msgs::PosePerceptPtr posePercept(new hector_worldmodel_msgs::PosePercept);
  tf::Pose pose;

  Parameters::load(percept->info.class_id);

  ROS_DEBUG("Incoming image percept with image coordinates [%f,%f] in frame %s", percept->x, percept->y, percept->header.frame_id.c_str());

  posePercept->header = percept->header;
  posePercept->info = percept->info;

  // use the distance reported by the percept or fall back to the class default
  float distance = percept->distance > 0.0 ? percept->distance : parameter(_default_distance, percept->info.class_id);

  // retrieve the camera model of the frame this percept was seen in
  CameraModelPtr cameraModel = getCameraModel(percept->header.frame_id, percept->camera_info);

  // project the image coordinates onto a unit ray in the camera frame
  cv::Point2d rectified = cameraModel->rectifyPoint(cv::Point2d(percept->x, percept->y));
  cv::Point3d direction_cv = cameraModel->projectPixelTo3dRay(rectified);
  tf::Point direction(direction_cv.x, direction_cv.y, direction_cv.z);
  direction.normalize();

  // place the percept at the assumed distance along the ray
  pose.setOrigin(tf::Point(direction_cv.x, direction_cv.y, direction_cv.z).normalized() * distance);
  {
    // construct an orthonormal basis whose x-axis points along the viewing ray
    // (see the note below this block)
    const tf::Point &d(direction);
    if (d.y() >= 0.999) {
      pose.setBasis(tf::Matrix3x3( 0., -1., 0.,
                                   1.,  0., 0.,
                                   0.,  0., 1. ));
    } else if (d.y() <= -0.999) {
      pose.setBasis(tf::Matrix3x3( 0., -1.,  0.,
                                  -1.,  0.,  0.,
                                   0.,  0., -1.));
    } else {
      double c = 1./sqrt(1. - d.y()*d.y());
      pose.setBasis(tf::Matrix3x3(d.x(), -c*d.z(), c*d.x()*d.y(),
                                  d.y(),  0,      -1./c,
                                  d.z(),  c*d.x(), c*d.y()*d.z() ));
    }
  }
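  // Note on the basis above: tf::Matrix3x3 is filled row by row, so the first
  // *column* of the matrix is the normalized viewing ray d; with
  // c = 1/sqrt(1 - d_y^2) the remaining two columns complete an orthonormal
  // frame, i.e. the object's x-axis points from the camera towards the
  // percept. The special cases handle rays (nearly) parallel to the camera
  // y-axis, where the generic construction would divide by zero.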

  ROS_DEBUG("--> Rectified image coordinates: [%f,%f]", rectified.x, rectified.y);
  ROS_DEBUG("--> Projected 3D ray (OpenCV): [%f,%f,%f]", direction_cv.x, direction_cv.y, direction_cv.z);
  ROS_DEBUG("--> Projected 3D ray (tf): [%f,%f,%f]", pose.getOrigin().x(), pose.getOrigin().y(), pose.getOrigin().z());

  if (percept->distance == 0.0 && parameter(_project_objects, percept->info.class_id)) {
    hector_nav_msgs::GetDistanceToObstacle::Request request;
    hector_nav_msgs::GetDistanceToObstacle::Response response;

    // project the percept onto the next obstacle along the viewing ray
    request.point.header = percept->header;
    tf::pointTFToMsg(pose.getOrigin(), request.point.point);
    ros::ServiceClientPtr client = parameter(_distance_to_obstacle_service, percept->info.class_id);
    if (client && client->call(request, response)) {
      if (response.distance > 0.0) {
        distance = std::max(response.distance, 0.0f);
        pose.setOrigin(pose.getOrigin().normalized() * distance);
        ROS_DEBUG("Projected percept to a distance of %.1f m", distance);
      } else {
        ROS_DEBUG("Ignoring percept due to unknown or infinite distance: service %s returned %f", client->getService().c_str(), response.distance);
        return;
      }
    } else {
      ROS_DEBUG("Ignoring percept due to unknown or infinite distance: service is not available");
      return;
    }
  }

  // build the percept covariance from the configured angle and distance
  // variances (the angle-based terms scale with the distance)
  Eigen::Matrix3f covariance(Eigen::Matrix3f::Zero());
  covariance(0,0) = std::max(distance*distance, 1.0f) * tan(parameter(_angle_variance, percept->info.class_id));
  covariance(1,1) = covariance(0,0);
  covariance(2,2) = parameter(_distance_variance, percept->info.class_id);

  // rotate the covariance with the pose basis: Σ' = R · Σ · Rᵀ
  Eigen::Matrix3f rotation_camera_object;
  rotation_camera_object << pose.getBasis()[0][0], pose.getBasis()[0][1], pose.getBasis()[0][2],
                            pose.getBasis()[1][0], pose.getBasis()[1][1], pose.getBasis()[1][2],
                            pose.getBasis()[2][0], pose.getBasis()[2][1], pose.getBasis()[2][2];
  covariance = rotation_camera_object * covariance * rotation_camera_object.transpose();

  // fill the PosePercept: pose and the position (x,y,z) block of the row-major 6x6 covariance
  tf::poseTFToMsg(pose, posePercept->pose.pose);
  posePercept->pose.covariance[0]  = covariance(0,0);
  posePercept->pose.covariance[1]  = covariance(0,1);
  posePercept->pose.covariance[2]  = covariance(0,2);
  posePercept->pose.covariance[6]  = covariance(1,0);
  posePercept->pose.covariance[7]  = covariance(1,1);
  posePercept->pose.covariance[8]  = covariance(1,2);
  posePercept->pose.covariance[12] = covariance(2,0);
  posePercept->pose.covariance[13] = covariance(2,1);
  posePercept->pose.covariance[14] = covariance(2,2);

  // forward the result as a pose percept
  posePerceptCb(posePercept);
}

void ObjectTracker::posePerceptCb(const hector_worldmodel_msgs::PosePerceptConstPtr &percept)
{
  Parameters::load(percept->info.class_id);

  // publish the incoming percept pose for debugging, if anyone listens
  if (perceptPoseDebugPublisher.getNumSubscribers() > 0) {
    geometry_msgs::PoseStamped pose;
    pose.pose = percept->pose.pose;
    pose.header = percept->header;
    perceptPoseDebugPublisher.publish(pose);
  }

  // call the percept verification services configured for this class
  float support_added_by_percept_verification = 0.0;
  ServiceClientsWithProperties &percept_verification_services = parameter(_percept_verification_services, percept->info.class_id);
  if (!percept_verification_services.empty()) {
    hector_worldmodel_msgs::VerifyPercept::Request request;
    hector_worldmodel_msgs::VerifyPercept::Response response;

    request.percept = *percept;

    for(ServiceClientsWithProperties::iterator it = percept_verification_services.begin(); it != percept_verification_services.end(); ++it) {
      if (!(it->first) || !(it->first->isValid())) continue;

      if (it->second.hasMember("ignore") && it->second["ignore"]) {
        ROS_DEBUG("Calling service %s for percept of class '%s', but ignoring its answer...", it->first->getService().c_str(), percept->info.class_id.c_str());
        it->first->call(request, response);

      } else if (it->first->call(request, response)) {
        if (response.response == response.DISCARD) {
          ROS_DEBUG("Discarded percept of class '%s' due to DISCARD message from service %s", percept->info.class_id.c_str(), it->first->getService().c_str());
          return;
        }
        if (response.response == response.CONFIRM) {
          ROS_DEBUG("We got a CONFIRMation for percept of class '%s' from service %s!", percept->info.class_id.c_str(), it->first->getService().c_str());
          support_added_by_percept_verification = 100.0;
        }
        if (response.response == response.UNKNOWN) {
          ROS_DEBUG("Verification service %s cannot help us with percept of class %s at the moment :-(", it->first->getService().c_str(), percept->info.class_id.c_str());
        }
      } else if (it->second.hasMember("required") && it->second["required"]) {
        ROS_DEBUG("Discarded percept of class '%s' as required service %s is not available", percept->info.class_id.c_str(), it->first->getService().c_str());
        return;
      }
    }
  }

  // convert the percept pose to tf
  tf::Pose pose;
  tf::poseMsgToTF(percept->pose.pose, pose);

  // extract the position covariance from the 6x6 pose covariance
  // (the covariance matrix is symmetric, so the storage order does not matter here)
  Eigen::Matrix<float,6,6> temp;
  for(unsigned int i = 0; i < 36; ++i) temp(i) = percept->pose.covariance[i];
  Eigen::Matrix3f covariance = temp.block<3,3>(0,0);

  // if the percept does not carry a covariance, fall back to the class default
  if (covariance.isZero()) {
    covariance(0,0) = parameter(_distance_variance, percept->info.class_id);
    covariance(1,1) = parameter(_distance_variance, percept->info.class_id);
    covariance(2,2) = parameter(_distance_variance, percept->info.class_id);
  }

  // transform the percept into the tracker frame
  tf::StampedTransform cameraTransform;
  if (!_frame_id.empty() && tf.resolve(percept->header.frame_id) != tf.resolve(_frame_id)) {
    ROS_DEBUG("Transforming percept from %s frame to %s frame", percept->header.frame_id.c_str(), _frame_id.c_str());

    // retrieve the transform from the percept frame into the tracker frame
    try {
      tf.waitForTransform(_frame_id, percept->header.frame_id, percept->header.stamp, ros::Duration(1.0));
      tf.lookupTransform(_frame_id, percept->header.frame_id, percept->header.stamp, cameraTransform);
    } catch (tf::TransformException& ex) {
      ROS_ERROR("%s", ex.what());
      return;
    }

    pose = cameraTransform * pose;

    // rotate the covariance into the tracker frame: Σ_map = R · Σ · Rᵀ
    Eigen::Matrix3f rotation_map_camera;
    rotation_map_camera << cameraTransform.getBasis()[0][0], cameraTransform.getBasis()[0][1], cameraTransform.getBasis()[0][2],
                           cameraTransform.getBasis()[1][0], cameraTransform.getBasis()[1][1], cameraTransform.getBasis()[1][2],
                           cameraTransform.getBasis()[2][0], cameraTransform.getBasis()[2][1], cameraTransform.getBasis()[2][2];
    covariance = rotation_map_camera * covariance * rotation_map_camera.transpose();
  }

  // check the height of the percept relative to the camera
  float relative_height = pose.getOrigin().z() - cameraTransform.getOrigin().z();
  if (relative_height < parameter(_min_height, percept->info.class_id) || relative_height > parameter(_max_height, percept->info.class_id)) {
    ROS_INFO("Discarding %s percept with height %f", percept->info.class_id.c_str(), relative_height);
    return;
  }

  // if a get_normal service is configured for this class, align the object with the wall normal
  ros::ServiceClientPtr get_normal_client = parameter(_get_normal_service, percept->info.class_id);
  if (get_normal_client) {
    hector_nav_msgs::GetNormal get_normal;
    get_normal.request.point.point.x = pose.getOrigin().x();
    get_normal.request.point.point.y = pose.getOrigin().y();
    get_normal.request.point.point.z = pose.getOrigin().z();
    get_normal.request.point.header.frame_id = "map";
    get_normal.request.point.header.stamp = percept->header.stamp;

    if (get_normal_client->call(get_normal.request, get_normal.response)) {
      tf::Vector3 normal(get_normal.response.normal.x, get_normal.response.normal.y, get_normal.response.normal.z);
      normal.normalize();

      // flip the normal if it points away from the current orientation
      if (pose.getBasis().getRow(0) * normal < 0) {
        normal = -normal;
      }

      {
        // construct an orthonormal basis whose x-axis points along the normal
        // (see the note below this block)
        const tf::Point &n(normal);
        if (n.z() >= 0.999) {
          pose.setBasis(tf::Matrix3x3(0, 0, -1., 0, 1., 0, 1., 0, 0));
        } else if (n.z() <= -0.999) {
          pose.setBasis(tf::Matrix3x3(0, 0, 1., 0, 1., 0, -1., 0, 0));
        } else {
          double c = 1./sqrt(1. - n.z()*n.z());
          pose.setBasis(tf::Matrix3x3(n.x(), -c*n.y(), -c*n.x()*n.z(),
                                      n.y(),  c*n.x(), -c*n.y()*n.z(),
                                      n.z(),  0,        1./c));
        }
      }
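      // Note on the basis above: analogous to imagePerceptCb, the first column
      // of the matrix is the (possibly flipped) surface normal n and, with
      // c = 1/sqrt(1 - n_z^2), the other two columns complete an orthonormal
      // frame, so the object's x-axis ends up pointing along the wall normal.
      // The special cases cover (nearly) vertical normals.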

      tfScalar r,p,y;
      pose.getBasis().getRPY(r, p, y);
      ROS_DEBUG("Object orientation was updated from wall normals: %f", y * 180./M_PI);

    } else {
      tfScalar r,p,y;
      pose.getBasis().getRPY(r, p, y);
      ROS_DEBUG("View angle was used as object orientation: %f", y * 180./M_PI);
    }
  }

  // determine the support for this percept: prefer the object-specific support,
  // otherwise use the class support plus any bonus from percept verification
  float support = 0.0;
  if (!percept->info.object_id.empty()) {
    support = percept->info.object_support;
  } else if (!percept->info.class_id.empty()) {
    support = percept->info.class_support + support_added_by_percept_verification;
  }

  if (support == 0.0) {
    ROS_WARN("Ignoring percept with support == 0.0");
    return;
  }

  // lock the model while we modify it
  model.lock();

  // find the object this percept belongs to, either by id or by best correspondence
  ObjectPtr object;
  if (percept->info.object_id.empty()) {
    model.getBestCorrespondence(object, pose, covariance, percept->info.class_id, percept->info.name, 1.0f);
  } else {
    object = model.getObject(percept->info.object_id);
  }

  // objects with a negative state are fixed and not updated by new percepts
  if (object && object->getState() < 0) {
    ROS_DEBUG("Percept was associated to object %s, which has a fixed state", object->getObjectId().c_str());
    model.unlock();
    return;
  }

  // create a new object if no match was found...
  if (!object) {
    object = model.add(percept->info.class_id, percept->info.object_id);

    object->setPose(pose);
    object->setCovariance(covariance);
    object->setSupport(support);

    ROS_INFO("Found new object %s of class %s at (%f,%f)!", object->getObjectId().c_str(), object->getClassId().c_str(), pose.getOrigin().getX(), pose.getOrigin().getY());

  // ...otherwise fuse the percept into the existing object...
  } else if (support > 0.0) {
    object->intersect(pose, covariance, support);

  // ...or just lower the support if the percept carries negative support
  } else {
    object->addSupport(support);
  }

  // update the object state depending on the accumulated support
  if ((object->getState() == ObjectState::UNKNOWN || object->getState() == ObjectState::INACTIVE) && parameter(_pending_support, percept->info.class_id) > 0) {
    if (object->getSupport() >= parameter(_pending_support, percept->info.class_id) && (percept->header.stamp - object->getHeader().stamp).toSec() >= parameter(_pending_time, percept->info.class_id)) {
      object->setState(ObjectState::PENDING);
    }
  }
  if (object->getState() == ObjectState::PENDING && parameter(_active_support, percept->info.class_id) > 0) {
    if (object->getSupport() >= parameter(_active_support, percept->info.class_id) && (percept->header.stamp - object->getHeader().stamp).toSec() >= parameter(_active_time, percept->info.class_id)) {
      object->setState(ObjectState::ACTIVE);
    }
  }

  // update the object header
  std_msgs::Header header;
  header.stamp = percept->header.stamp;
  header.frame_id = _frame_id;
  object->setHeader(header);

  // update the object name if the percept carries one
  if (!percept->info.name.empty()) object->setName(percept->info.name);

  model.unlock();

  // call the object verification services configured for this class
  ServiceClientsWithProperties &object_verification_services = parameter(_object_verification_services, percept->info.class_id);
  if (!object_verification_services.empty()) {
    hector_worldmodel_msgs::VerifyObject::Request request;
    hector_worldmodel_msgs::VerifyObject::Response response;

    object->getMessage(request.object);

    for(ServiceClientsWithProperties::iterator it = object_verification_services.begin(); it != object_verification_services.end(); ++it) {
      if (it->second.hasMember("ignore") && it->second["ignore"]) {
        ROS_DEBUG("Calling service %s for object %s, but ignoring its answer...", it->first->getService().c_str(), object->getObjectId().c_str());
        it->first->call(request, response);

      } else if (it->first->call(request, response)) {
        if (response.response == response.DISCARD) {
          ROS_DEBUG("Discarded object %s due to DISCARD message from service %s", object->getObjectId().c_str(), it->first->getService().c_str());
          object->setState(ObjectState::DISCARDED);
        }
        if (response.response == response.CONFIRM) {
          ROS_DEBUG("We got a CONFIRMation for object %s from service %s!", object->getObjectId().c_str(), it->first->getService().c_str());
          object->addSupport(100.0);
        }
        if (response.response == response.UNKNOWN) {
          ROS_DEBUG("Verification service %s cannot help us with object %s at the moment :-(", it->first->getService().c_str(), object->getObjectId().c_str());
        }
      } else if (it->second.hasMember("required") && it->second["required"]) {
        ROS_DEBUG("Discarded object %s as required service %s is not available", object->getObjectId().c_str(), it->first->getService().c_str());
        object->setState(ObjectState::DISCARDED);
      }
    }
  }

  // publish the resulting object pose for debugging, if anyone listens
  if (objectPoseDebugPublisher.getNumSubscribers() > 0) {
    geometry_msgs::PoseStamped pose;
    object->getPose(pose.pose);
    pose.header = object->getHeader();
    objectPoseDebugPublisher.publish(pose);
  }

  modelUpdatePublisher.publish(object->getMessage());
  publishModel();
}

ObjectTracker::CameraModelPtr ObjectTracker::getCameraModel(const std::string& frame_id, const sensor_msgs::CameraInfo& camera_info) {
  // return the cached camera model for this frame or create a new one from the camera info
  CameraModelPtr cameraModel;
  if (cameraModels.count(frame_id) == 0) {
    cameraModel.reset(new image_geometry::PinholeCameraModel());
    if (!cameraModel->fromCameraInfo(camera_info)) {
      ROS_ERROR("Could not initialize camera model from CameraInfo given in the percept");
      return CameraModelPtr();
    }
    cameraModels[frame_id] = cameraModel;
  } else {
    cameraModel = cameraModels[frame_id];
  }

  return cameraModel;
}

void ObjectTracker::objectAgeingCb(const std_msgs::Float32ConstPtr &ageing) {
  ROS_DEBUG("ageing of all objects by %f", ageing->data);

  model.lock();

  ObjectList objects = model.getObjects();

  for(ObjectModel::iterator it = objects.begin(); it != objects.end();) {
    ObjectPtr object = *it;

    // lower the support of every object by the ageing value
    object->setSupport(object->getSupport() - ageing->data);

    // remove objects whose support dropped below the ageing threshold
    if (object->getSupport() < _ageing_threshold) {
      ROS_INFO("remove object %s with support %f", object->getObjectId().c_str(), object->getSupport());
      it = objects.erase(it);
      model.remove(object);
    } else {
      it++;
    }
  }

  model.unlock();
  publishModel();
}

void ObjectTracker::modelUpdateCb(const hector_worldmodel_msgs::ObjectModelConstPtr &update)
{
  for(hector_worldmodel_msgs::ObjectModel::_objects_type::const_iterator it = update->objects.begin(); it != update->objects.end(); ++it)
  {
    hector_worldmodel_msgs::AddObject object;
    object.request.map_to_next_obstacle = false;
    object.request.object = *it;
    if (!addObjectCb(object.request, object.response)) {
      ROS_WARN("Could not update object %s", it->info.object_id.c_str());
    }
  }
}

bool ObjectTracker::setObjectStateCb(hector_worldmodel_msgs::SetObjectState::Request& request, hector_worldmodel_msgs::SetObjectState::Response& response) {
  model.lock();

  ObjectPtr object = model.getObject(request.object_id);
  if (!object) {
    model.unlock();
    return false;
  }

  object->setState(request.new_state.state);
  modelUpdatePublisher.publish(object->getMessage());

  // if this object has been confirmed, discard other objects of the same class
  // that are closer than the configured minimum distance
  double min_distance_between_objects = parameter(_min_distance_between_objects, object->getClassId());
  if ((min_distance_between_objects > 0.0) && (request.new_state.state == hector_worldmodel_msgs::ObjectState::CONFIRMED)) {
    for(ObjectModel::iterator it = model.begin(); it != model.end(); ++it) {
      ObjectPtr other = *it;
      if (other == object) continue;
      if (other->getClassId() != object->getClassId()) continue;

      if (other->getDistance(*object) < min_distance_between_objects) {
        other->setState(hector_worldmodel_msgs::ObjectState::DISCARDED);
        modelUpdatePublisher.publish(other->getMessage());
      }
    }
  }

  model.unlock();
  publishModel();
  return true;
}

bool ObjectTracker::setObjectNameCb(hector_worldmodel_msgs::SetObjectName::Request& request, hector_worldmodel_msgs::SetObjectName::Response& response) {
  model.lock();

  ObjectPtr object = model.getObject(request.object_id);
  if (!object) {
    model.unlock();
    return false;
  }

  object->setName(request.name);
  modelUpdatePublisher.publish(object->getMessage());

  model.unlock();
  publishModel();
  return true;
}

bool ObjectTracker::addObjectCb(hector_worldmodel_msgs::AddObject::Request& request, hector_worldmodel_msgs::AddObject::Response& response) {
  ObjectPtr object;
  bool newObject = false;

  // check whether the request refers to an already known object
  if (!request.object.info.object_id.empty()) {
    ROS_INFO("add_object service called for known %s object %s in frame %s", request.object.info.class_id.c_str(), request.object.info.object_id.c_str(), request.object.header.frame_id.c_str());
    object = model.getObject(request.object.info.object_id);
  } else {
    ROS_INFO("add_object service called for new %s object in frame %s", request.object.info.class_id.c_str(), request.object.header.frame_id.c_str());
  }

  // create a new object if it does not exist yet
  if (!object) {
    object.reset(new Object(request.object.info.class_id, request.object.info.object_id));
    newObject = true;
  }

  std_msgs::Header header = request.object.header;
  if (header.stamp.isZero()) header.stamp = ros::Time::now();

  geometry_msgs::PoseWithCovariance pose;
  if (request.map_to_next_obstacle) {
    pose.covariance = request.object.pose.covariance;
    if (!mapToNextObstacle(request.object.pose.pose, header, request.object.info, pose.pose)) {
      return false;
    }
  } else {
    pose = request.object.pose;
  }

  // if no variances are given, default the position variances to 1.0
  if (pose.covariance[0] == 0 && pose.covariance[7] == 0 && pose.covariance[14] == 0 &&
      pose.covariance[21] == 0 && pose.covariance[28] == 0 && pose.covariance[35] == 0) {
    pose.covariance.assign(0.0);
    pose.covariance[0] = 1.0;
    pose.covariance[7] = 1.0;
    pose.covariance[14] = 1.0;
  }

  if (!transformPose(pose, pose, header)) return false;

  model.lock();

  object->setHeader(header);
  object->setPose(pose);
  object->setState(request.object.state.state);
  object->setSupport(request.object.info.support);

  if (newObject) model.add(object);
  object->getMessage(response.object);
  modelUpdatePublisher.publish(response.object);

  model.unlock();

  publishModel();
  return true;
}

bool ObjectTracker::getObjectModelCb(hector_worldmodel_msgs::GetObjectModel::Request& request, hector_worldmodel_msgs::GetObjectModel::Response& response) {
  getMergedModel().getMessage(response.model);
  return true;
}

void ObjectTracker::mergeModelCallback(const hector_worldmodel_msgs::ObjectModelConstPtr &new_model, const MergedModelPtr& info)
{
  info->model = *new_model;
  publishModel();
}

void ObjectTracker::negativeUpdateCallback(const sensor_msgs::CameraInfoConstPtr &camera_info, const NegativeUpdatePtr& info)
{
  model.lock();

  // retrieve the camera model of the camera this CameraInfo belongs to
  CameraModelPtr cameraModel = getCameraModel(camera_info->header.frame_id, *camera_info);

  // retrieve the transform from the tracker frame into the camera frame
  tf::StampedTransform cameraInverseTransform;
  try {
    tf.waitForTransform(camera_info->header.frame_id, _frame_id, camera_info->header.stamp, ros::Duration(1.0));
    tf.lookupTransform(camera_info->header.frame_id, _frame_id, camera_info->header.stamp, cameraInverseTransform);
  } catch (tf::TransformException& ex) {
    ROS_ERROR("%s", ex.what());
    model.unlock();
    return;
  }

  ROS_DEBUG_NAMED("negative_update", "Doing negative update for frame_id %s", camera_info->header.frame_id.c_str());

  // iterate over all tracked objects
  for(ObjectModel::iterator it = model.begin(); it != model.end(); ++it) {
    const ObjectPtr& object = *it;

    // objects with a fixed (negative) state are never updated
    if (object->getState() < 0) continue;

    // only consider objects of the configured class
    if (!info->class_id.empty() && info->class_id != object->getClassId()) {
      ROS_DEBUG_NAMED("negative_update", "%s: wrong class_id %s", object->getObjectId().c_str(), object->getClassId().c_str());
      continue;
    }

    // skip objects that have been seen recently
    if (object->getStamp() >= camera_info->header.stamp - info->not_seen_duration) {
      ROS_DEBUG_NAMED("negative_update", "%s: seen %f seconds ago", object->getObjectId().c_str(), (camera_info->header.stamp - object->getStamp()).toSec());
      continue;
    }

    // transform the object pose into the camera frame
    tf::Pose pose;
    object->getPose(pose);
    pose = cameraInverseTransform * pose;
    cv::Point3d xyz(pose.getOrigin().x(), pose.getOrigin().y(), pose.getOrigin().z());

    // check the distance to the camera
    float distance = pose.getOrigin().length();
    if (distance < info->min_distance || (info->max_distance > 0.0 && distance > info->max_distance)) {
      ROS_DEBUG_NAMED("negative_update", "%s: wrong distance: %f", object->getObjectId().c_str(), distance);
      continue;
    }

    // project the object position into the image
    cv::Point2d point = cameraModel->project3dToPixel(xyz);

    // skip objects outside the field of view (minus the ignored border)
    if (!(point.x > 0 + info->ignore_border_pixels &&
          point.x < camera_info->width - info->ignore_border_pixels &&
          point.y > 0 + info->ignore_border_pixels &&
          point.y < camera_info->height - info->ignore_border_pixels)) {
      ROS_DEBUG_NAMED("negative_update", "%s: not within field of view (image coordinates (%f,%f))", object->getObjectId().c_str(), point.x, point.y);
      continue;
    }

    // the object should have been seen: lower its support and possibly deactivate it
    ROS_DEBUG("Doing negative update of %s. Should be at image coordinates (%f,%f).", object->getObjectId().c_str(), point.x, point.y);
    object->addSupport(-info->negative_support);
    if (object->getSupport() < info->min_support) object->setSupport(info->min_support);
    if (object->getSupport() <= parameter(_inactive_support, info->class_id)) {
      if (object->getState() == ObjectState::PENDING || object->getState() == ObjectState::ACTIVE)
        object->setState(ObjectState::INACTIVE);
    }

    modelUpdatePublisher.publish(object->getMessage());
  }

  model.unlock();
}

bool ObjectTracker::mapToNextObstacle(const geometry_msgs::Pose& source, const std_msgs::Header &header, const ObjectInfo &info, geometry_msgs::Pose &mapped) {
  Parameters::load(info.class_id);
  ros::ServiceClientPtr client = parameter(_distance_to_obstacle_service, info.class_id);

  if (!client || !client->exists()) {
    ROS_ERROR("Could not map object to next obstacle as the distance service %s is not available", client ? client->getService().c_str() : "(unset)");
    return false;
  }

  // query the distance to the next obstacle along the ray towards the source position,
  // starting from the class-specific default distance
  float distance = parameter(_default_distance, info.class_id);
  hector_nav_msgs::GetDistanceToObstacle::Request request;
  hector_nav_msgs::GetDistanceToObstacle::Response response;

  request.point.header = header;
  request.point.point = source.position;

  if (client->call(request, response) && response.distance > 0.0) {
    distance = std::max(response.distance, 0.0f);
  } else {
    ROS_DEBUG("Could not map object to next obstacle due to unknown or infinite distance");
    return false;
  }

  tf::Pose sourceTF;
  tf::poseMsgToTF(source, sourceTF);
  sourceTF.setOrigin(sourceTF.getOrigin().normalized() * distance);
  tf::poseTFToMsg(sourceTF, mapped);

  return true;
}

bool ObjectTracker::transformPose(const geometry_msgs::Pose& from, geometry_msgs::Pose &to, std_msgs::Header &header, tf::StampedTransform *transform_ptr) {
  // retrieve the transform from the header frame into the tracker frame
  tf::StampedTransform transform;
  try {
    tf.waitForTransform(_frame_id, header.frame_id, header.stamp, ros::Duration(1.0));
    tf.lookupTransform(_frame_id, header.frame_id, header.stamp, transform);
  } catch (tf::TransformException& ex) {
    ROS_ERROR("%s", ex.what());
    return false;
  }

  tf::Pose tfPose;
  tf::poseMsgToTF(from, tfPose);
  tfPose = transform * tfPose;
  tf::poseTFToMsg(tfPose, to);

  header.frame_id = _frame_id;
  if (transform_ptr) *transform_ptr = transform;

  return true;
}

bool ObjectTracker::transformPose(const geometry_msgs::PoseWithCovariance& from, geometry_msgs::PoseWithCovariance &to, std_msgs::Header &header) {
  tf::StampedTransform transform;

  if (!transformPose(from.pose, to.pose, header, &transform)) return false;

  // note: the covariance is neither copied nor rotated here; the only caller in
  // this file passes the same variable as input and output, so it is left unchanged
  return true;
}

ObjectModel ObjectTracker::getMergedModel()
{
  if (merged_models.size() == 0) return model;

  ROS_DEBUG("Merging object models from %lu sources.", merged_models.size());

  // merge all remote models into a copy of the local model
  ObjectModel merged(model);
  for(std::vector<MergedModelPtr>::iterator it = merged_models.begin(); it != merged_models.end(); ++it)
  {
    merged.mergeWith((*it)->model, tf, (*it)->prefix);
  }

  return merged;
}

void ObjectTracker::publishModelEvent(const ros::TimerEvent&) {
  publishModel();
}

void ObjectTracker::publishModel() {
  ObjectModel merged_model = getMergedModel();

  // publish the merged object model
  modelPublisher.publish(merged_model.getMessage());

  // publish visualization markers for debugging
  visualization_msgs::MarkerArray markers;
  merged_model.getVisualization(markers);

  drawings.addMarkers(markers);
  drawings.sendAndResetData();
}

} // namespace hector_object_tracker

int main(int argc, char **argv)
{
  ros::init(argc, argv, "object_tracker");

  hector_object_tracker::ObjectTracker tracker;
  ros::spin();

  return 0;
}