object_tracker.cpp
Go to the documentation of this file.
00001 #include "object_tracker.h"
00002 
00003 #include <hector_nav_msgs/GetDistanceToObstacle.h>
00004 #include <hector_worldmodel_msgs/VerifyObject.h>
00005 #include <hector_worldmodel_msgs/VerifyPercept.h>
00006 
00007 #include <math.h>
00008 
00009 #include "Object.h"
00010 #include "parameters.h"
00011 
00012 #include <boost/algorithm/string.hpp>
00013 
00014 namespace hector_object_tracker {
00015 
// Constructor: reads all node parameters, wires up the worldmodel topics and
// services, and optionally subscribes to merged models, negative-update
// camera streams and a periodic publish timer.
ObjectTracker::ObjectTracker()
{
  // Private node handle for node-local parameters and debug topics.
  ros::NodeHandle priv_nh("~");
  priv_nh.param("frame_id", _frame_id, std::string("map"));
  priv_nh.param("worldmodel_ns", _worldmodel_ns, std::string("worldmodel"));
  priv_nh.param("ageing_threshold", _ageing_threshold, 1.0);
  priv_nh.param("publish_interval", _publish_interval, 0.0);

  // Defaults for the per-class parameters; Parameters::load() below may
  // override them from the parameter server.
  parameter(_project_objects)   = false;
  parameter(_default_distance)  = 1.0;
  parameter(_distance_variance) = pow(1.0, 2);
  parameter(_angle_variance)    = pow(10.0 * M_PI / 180.0, 2);
  parameter(_min_height)        = -999.9;
  parameter(_max_height)        = 999.9;
  parameter(_pending_support)   = 0.0;
  parameter(_pending_time)      = 0.0;
  parameter(_active_support)    = 0.0;
  parameter(_active_time)       = 0.0;
  parameter(_inactive_support)  = 0.0;
  parameter(_inactive_time)     = 0.0;

  // Default visualization marker color (opaque, r=0.8).
  std_msgs::ColorRGBA default_color;
  default_color.r = 0.8; default_color.a = 1.0;
  parameter(_marker_color)      = default_color;

  Parameters::load();

  // Percept inputs and model outputs live in the worldmodel namespace.
  ros::NodeHandle worldmodel(_worldmodel_ns);
  imagePerceptSubscriber = worldmodel.subscribe("image_percept", 10, &ObjectTracker::imagePerceptCb, this);
  posePerceptSubscriber = worldmodel.subscribe("pose_percept", 10, &ObjectTracker::posePerceptCb, this);
  objectAgeingSubscriber = worldmodel.subscribe("object_ageing", 10, &ObjectTracker::objectAgeingCb, this);
  modelPublisher = worldmodel.advertise<hector_worldmodel_msgs::ObjectModel>("objects", 10, false);
  modelUpdatePublisher = worldmodel.advertise<hector_worldmodel_msgs::Object>("object", 10, false);
  modelUpdateSubscriber = worldmodel.subscribe<hector_worldmodel_msgs::ObjectModel>("update", 10, &ObjectTracker::modelUpdateCb, this);

  Object::setNamespace(_worldmodel_ns);
  model.setFrameId(_frame_id);

  // Global reset command and per-node debug outputs.
  sysCommandSubscriber = nh.subscribe("/syscommand", 10, &ObjectTracker::sysCommandCb, this);
  poseDebugPublisher = priv_nh.advertise<geometry_msgs::PoseStamped>("pose", 10, false);
  pointDebugPublisher = priv_nh.advertise<geometry_msgs::PointStamped>("point", 10, false);

  // Optional external verification services; each entry is a struct with at
  // least "service" and "type" ("object" or "percept") members.
  XmlRpc::XmlRpcValue verification_services;
  if (priv_nh.getParam("verification_services", verification_services) && verification_services.getType() == XmlRpc::XmlRpcValue::TypeArray) {
    for(int i = 0; i < verification_services.size(); ++i) {
      XmlRpc::XmlRpcValue item = verification_services[i];
      if (!item.hasMember("service")) {
        ROS_ERROR("Verification service %d could not be intialized: unknown service name", i);
        continue;
      }
      if (!item.hasMember("type")) {
        ROS_ERROR("Verification service %d could not be intialized: unknown service type", i);
        continue;
      }

      // A client is only created for the two known service types; anything
      // else leaves the client invalid and the entry is skipped below.
      ros::ServiceClient client;
      if (item["type"] == "object") {
        client = nh.serviceClient<hector_worldmodel_msgs::VerifyObject>(item["service"]);
      } else if (item["type"] == "percept") {
        client = nh.serviceClient<hector_worldmodel_msgs::VerifyPercept>(item["service"]);
      }

      if (!client.isValid()) continue;
      if (!client.exists()) {
        if (!item.hasMember("required") || !item["required"]) {
          ROS_WARN("Verification service %s is not (yet) there...", client.getService().c_str());
        } else {
          // A "required" service blocks startup until it becomes available.
          ROS_WARN("Required verification service %s is not available... waiting...", client.getService().c_str());
          while(ros::ok() && !client.waitForExistence(ros::Duration(1.0)));
        }
      }

      // Register either for a specific class_id or for all classes ("*").
      if (item.hasMember("class_id")) {
        verificationServices[item["type"]][item["class_id"]].push_back(std::make_pair(client, item));
        ROS_INFO("Using %s verification service %s for objects of class %s", std::string(item["type"]).c_str(), client.getService().c_str(), std::string(item["class_id"]).c_str());
      } else {
        verificationServices[item["type"]]["*"].push_back(std::make_pair(client, item));
        ROS_INFO("Using %s verification service %s", std::string(item["type"]).c_str(), client.getService().c_str());
      }
    }
  }

  // Services offered by this node.
  setObjectState = worldmodel.advertiseService("set_object_state", &ObjectTracker::setObjectStateCb, this);
  setObjectName  = worldmodel.advertiseService("set_object_name", &ObjectTracker::setObjectNameCb, this);
  addObject = worldmodel.advertiseService("add_object", &ObjectTracker::addObjectCb, this);
  getObjectModel = worldmodel.advertiseService("get_object_model", &ObjectTracker::getObjectModelCb, this);

  // Optional list of external object models to merge into the published
  // model; entries are either plain topic strings or structs with "topic"
  // and an optional "prefix".
  XmlRpc::XmlRpcValue merge;
  if (priv_nh.getParam("merge", merge) && merge.getType() == XmlRpc::XmlRpcValue::TypeArray) {
    for(int i = 0; i < merge.size(); ++i) {
      const MergedModelPtr& info = *merged_models.insert(merged_models.end(), boost::make_shared<MergedModelInfo>());
      ros::SubscribeOptions options = ros::SubscribeOptions::create<hector_worldmodel_msgs::ObjectModel>(std::string(), 10, boost::bind(&ObjectTracker::mergeModelCallback, this, _1, info), ros::VoidConstPtr(), 0);
      if (merge[i].getType() == XmlRpc::XmlRpcValue::TypeStruct && merge[i].hasMember("topic")) {
        options.topic = static_cast<std::string>(merge[i]["topic"]);
        if (merge[i].hasMember("prefix")) info->prefix = static_cast<std::string>(merge[i]["prefix"]);
      } else if (merge[i].getType() == XmlRpc::XmlRpcValue::TypeString) {
        options.topic = static_cast<std::string>(merge[i]);
      }

      if (options.topic.empty()) {
        ROS_ERROR("Each entry in parameter merge must be either a string containing the topic to subscribe or a struct.");
        continue;
      }
      info->subscriber = nh.subscribe(options);

      ROS_INFO("Subscribed to object model %s.", options.topic.c_str());
    }
  }

  // Optional negative updates: camera_info streams whose field of view is
  // used to lower the support of objects that should have been seen.
  XmlRpc::XmlRpcValue negative_update;
  if (priv_nh.getParam("negative_update", negative_update) && negative_update.getType() == XmlRpc::XmlRpcValue::TypeArray) {
    for(int i = 0; i < negative_update.size(); ++i) {
      if (negative_update[i].getType() != XmlRpc::XmlRpcValue::TypeStruct) continue;
      const NegativeUpdatePtr& info = *negativeUpdate.insert(negativeUpdate.end(), boost::make_shared<NegativeUpdateInfo>());

      // default options
      info->negative_support = 0.0;
      info->min_support = 0.0;
      info->min_distance = 0.0;
      info->max_distance = 0.0;
      info->ignore_border_pixels = 0.0;
      info->not_seen_duration = ros::Duration(0.5);

      ros::SubscribeOptions options = ros::SubscribeOptions::create<sensor_msgs::CameraInfo>(std::string(), 10, boost::bind(&ObjectTracker::negativeUpdateCallback, this, _1, info), ros::VoidConstPtr(), 0);
      if (negative_update[i].hasMember("topic"))                options.topic = static_cast<std::string>(negative_update[i]["topic"]);
      if (negative_update[i].hasMember("class_id"))             info->class_id = static_cast<std::string>(negative_update[i]["class_id"]);
      if (negative_update[i].hasMember("negative_support"))     info->negative_support = static_cast<double>(negative_update[i]["negative_support"]);
      if (negative_update[i].hasMember("min_support"))          info->min_support = static_cast<double>(negative_update[i]["min_support"]);
      if (negative_update[i].hasMember("min_distance"))         info->min_distance = static_cast<double>(negative_update[i]["min_distance"]);
      if (negative_update[i].hasMember("max_distance"))         info->max_distance = static_cast<double>(negative_update[i]["max_distance"]);
      if (negative_update[i].hasMember("ignore_border_pixels")) info->ignore_border_pixels = static_cast<double>(negative_update[i]["ignore_border_pixels"]);
      if (negative_update[i].hasMember("not_seen_duration"))    info->not_seen_duration = ros::Duration(static_cast<double>(negative_update[i]["not_seen_duration"]));

      if (options.topic.empty()) {
        ROS_ERROR("Each entry in parameter negative_update must have a camera_info topic to subscribe to.");
        continue;
      }
      info->subscriber = nh.subscribe(options);
    }
  }

  // Periodic model publishing is disabled when publish_interval <= 0.
  if (_publish_interval > 0.0) {
    publishTimer = nh.createTimer(ros::Duration(_publish_interval), &ObjectTracker::publishModelEvent, this, false, true);
  }
}
00161 
// Destructor: nothing to do explicitly, all members release their resources
// via their own destructors (RAII).
ObjectTracker::~ObjectTracker()
{}
00164 
00165 void ObjectTracker::sysCommandCb(const std_msgs::StringConstPtr &sysCommand)
00166 {
00167   if (sysCommand->data == "reset") {
00168     ROS_INFO("Resetting object model.");
00169     model.reset();
00170     drawings.reset();
00171     for(std::vector<MergedModelPtr>::const_iterator it =  merged_models.begin(); it != merged_models.end(); ++it) {
00172       (*it)->model.reset();
00173     }
00174   }
00175 }
00176 
00177 void ObjectTracker::imagePerceptCb(const hector_worldmodel_msgs::ImagePerceptConstPtr &percept)
00178 {
00179   hector_worldmodel_msgs::PosePerceptPtr posePercept(new hector_worldmodel_msgs::PosePercept);
00180   tf::Pose pose;
00181 
00182   Parameters::load(percept->info.class_id);
00183 
00184   ROS_DEBUG("Incoming image percept with image coordinates [%f,%f] in frame %s", percept->x, percept->y, percept->header.frame_id.c_str());
00185 
00186   posePercept->header = percept->header;
00187   posePercept->info = percept->info;
00188 
00189   // retrieve distance information
00190   float distance = percept->distance > 0.0 ? percept->distance : parameter(_default_distance, percept->info.class_id);
00191 
00192   // retrieve camera model from either the cache or from CameraInfo given in the percept
00193   CameraModelPtr cameraModel = getCameraModel(percept->header.frame_id, percept->camera_info);
00194 
00195   // transform Point using the camera mode
00196   cv::Point2d rectified = cameraModel->rectifyPoint(cv::Point2d(percept->x, percept->y));
00197   cv::Point3d direction_cv = cameraModel->projectPixelTo3dRay(rectified);
00198 //  pose.setOrigin(tf::Point(direction_cv.z, -direction_cv.x, -direction_cv.y).normalized() * distance);
00199 //  tf::Quaternion direction(atan2(-direction_cv.x, direction_cv.z), atan2(direction_cv.y, sqrt(direction_cv.z*direction_cv.z + direction_cv.x*direction_cv.x)), 0.0);
00200   pose.setOrigin(tf::Point(direction_cv.x, direction_cv.y, direction_cv.z).normalized() * distance);
00201   tf::Quaternion direction;
00202   direction.setEuler(atan2(direction_cv.x, direction_cv.z), atan2(-direction_cv.y, sqrt(direction_cv.z*direction_cv.z + direction_cv.x*direction_cv.x)), 0.0);
00203   direction = direction * tf::Quaternion(0.5, -0.5, 0.5, 0.5);
00204   pose.getBasis().setRotation(direction);
00205 
00206   ROS_DEBUG("--> Rectified image coordinates: [%f,%f]", rectified.x, rectified.y);
00207   ROS_DEBUG("--> Projected 3D ray (OpenCV):   [%f,%f,%f]", direction_cv.x, direction_cv.y, direction_cv.z);
00208   ROS_DEBUG("--> Projected 3D ray (tf):       [%f,%f,%f]", pose.getOrigin().x(), pose.getOrigin().y(),pose.getOrigin().z());
00209 
00210   if (percept->distance == 0.0 && parameter(_project_objects, percept->info.class_id)) {
00211     hector_nav_msgs::GetDistanceToObstacle::Request request;
00212     hector_nav_msgs::GetDistanceToObstacle::Response response;
00213 
00214     // project image percept to the next obstacle
00215     request.point.header = percept->header;
00216     tf::pointTFToMsg(pose.getOrigin(), request.point.point);
00217     if (parameter(_distance_to_obstacle_service, percept->info.class_id).call(request, response)) {
00218       if (response.distance > 0.0) {
00219         // distance = std::max(response.distance - 0.1f, 0.0f);
00220         distance = std::max(response.distance, 0.0f);
00221         pose.setOrigin(pose.getOrigin().normalized() * distance);
00222         ROS_DEBUG("Projected percept to a distance of %.1f m", distance);
00223       } else {
00224         ROS_WARN("Ignoring percept due to unknown or infinite distance: service %s returned %f", parameter(_distance_to_obstacle_service, percept->info.class_id).getService().c_str(), response.distance);
00225         return;
00226       }
00227     } else {
00228       ROS_WARN("Ignoring percept due to unknown or infinite distance: service %s is not available", parameter(_distance_to_obstacle_service, percept->info.class_id).getService().c_str());
00229       return;
00230     }
00231   }
00232 
00233   // set variance
00234   Eigen::Matrix3f covariance(Eigen::Matrix3f::Zero());
00235   covariance(0,0) = std::max(distance*distance, 1.0f) * tan(parameter(_angle_variance, percept->info.class_id));
00236   covariance(1,1) = covariance(0,0);
00237   covariance(2,2) = parameter(_distance_variance, percept->info.class_id);
00238 
00239   // rotate covariance matrix depending on the position in the image
00240   Eigen::Quaterniond eigen_rotation(direction.w(), direction.x(), direction.y(), direction.z());
00241   Eigen::Matrix3f rotation_camera_object(eigen_rotation.toRotationMatrix().cast<float>());
00242   covariance = rotation_camera_object * covariance * rotation_camera_object.transpose();
00243 
00244   // fill posePercept
00245   tf::poseTFToMsg(pose, posePercept->pose.pose);
00246   // tf::quaternionTFToMsg(direction, posePercept->pose.pose.orientation);
00247   posePercept->pose.covariance[0]  = covariance(0,0);
00248   posePercept->pose.covariance[1]  = covariance(0,1);
00249   posePercept->pose.covariance[2]  = covariance(0,2);
00250   posePercept->pose.covariance[6]  = covariance(1,0);
00251   posePercept->pose.covariance[7]  = covariance(1,1);
00252   posePercept->pose.covariance[8]  = covariance(1,2);
00253   posePercept->pose.covariance[12] = covariance(2,0);
00254   posePercept->pose.covariance[13] = covariance(2,1);
00255   posePercept->pose.covariance[14] = covariance(2,2);
00256 
00257   // forward to posePercept callback
00258   posePerceptCb(posePercept);
00259 }
00260 
00261 void ObjectTracker::posePerceptCb(const hector_worldmodel_msgs::PosePerceptConstPtr &percept)
00262 {
00263   Parameters::load(percept->info.class_id);
00264 
00265   // publish pose in source frame for debugging purposes
00266   if (poseDebugPublisher.getNumSubscribers() > 0) {
00267     geometry_msgs::PoseStamped pose;
00268     pose.pose = percept->pose.pose;
00269     pose.header = percept->header;
00270     poseDebugPublisher.publish(pose);
00271   }
00272 
00273   // call percept verification
00274   float support_added_by_percept_verification = 0.0;
00275   if (verificationServices.count("percept") > 0) {
00276     hector_worldmodel_msgs::VerifyPercept::Request request;
00277     hector_worldmodel_msgs::VerifyPercept::Response response;
00278 
00279     request.percept = *percept;
00280 
00281     std::vector<VerificationService> services(verificationServices["percept"]["*"]);
00282     if (!percept->info.class_id.empty()) {
00283       services.insert(services.end(), verificationServices["percept"][percept->info.class_id].begin(), verificationServices["percept"][percept->info.class_id].end());
00284     }
00285 
00286     for(std::vector<VerificationService>::iterator it = services.begin(); it != services.end(); ++it) {
00287       if (it->second.hasMember("ignore") && it->second["ignore"]) {
00288         ROS_DEBUG("Calling service %s for percept of class '%s'', but ignoring its answer...", it->first.getService().c_str(), percept->info.class_id.c_str());
00289         it->first.call(request, response);
00290 
00291       } else if (it->first.call(request, response)) {
00292         if (response.response == response.DISCARD) {
00293           ROS_DEBUG("Discarded percept of class '%s' due to DISCARD message from service %s", percept->info.class_id.c_str(), it->first.getService().c_str());
00294           return;
00295         }
00296         if (response.response == response.CONFIRM) {
00297           ROS_DEBUG("We got a CONFIRMation for percept of class '%s' from service %s!", percept->info.class_id.c_str(), it->first.getService().c_str());
00298           support_added_by_percept_verification = 100.0;
00299         }
00300         if (response.response == response.UNKNOWN) {
00301           ROS_DEBUG("Verification service %s cannot help us with percept of class %s at the moment :-(", it->first.getService().c_str(), percept->info.class_id.c_str());
00302         }
00303       } else if (it->second.hasMember("required") && it->second["required"]) {
00304         ROS_DEBUG("Discarded percept of class '%s' as required service %s is not available", percept->info.class_id.c_str(), it->first.getService().c_str());
00305         return;
00306       }
00307     }
00308   }
00309 
00310   // convert pose in tf
00311   tf::Pose pose;
00312   tf::poseMsgToTF(percept->pose.pose, pose);
00313 
00314   // retrieve distance information
00315 //  float distance = pose.getOrigin().length();
00316 //  if (parameter(_project_objects, percept->info.class_id)) {
00317 //    hector_nav_msgs::GetDistanceToObstacle::Request request;
00318 //    hector_nav_msgs::GetDistanceToObstacle::Response response;
00319 
00320 //    // project image percept to the next obstacle
00321 //    request.point.header = percept->header;
00322 //    tf::pointTFToMsg(pose.getOrigin(), request.point.point);
00323 //    if (parameter(_distance_to_obstacle_service, percept->info.class_id).call(request, response) && response.distance > 0.0) {
00324 //      // distance = std::max(response.distance - 0.1f, 0.0f);
00325 //      distance = std::max(response.distance, 0.0f);
00326 //      pose.setOrigin(pose.getOrigin().normalized() * distance);
00327 //      ROS_DEBUG("Projected percept to a distance of %.1f m", distance);
00328 //    } else {
00329 //      ROS_DEBUG("Ignoring percept due to unknown or infinite distance");
00330 //      return;
00331 //    }
00332 //  }
00333 
00334   // extract variance matrix
00335   Eigen::Matrix<float,6,6> temp;
00336   for(unsigned int i = 0; i < 36; ++i) temp(i) = percept->pose.covariance[i];
00337   Eigen::Matrix3f covariance = temp.block<3,3>(0,0);
00338 
00339   // if no variance is given, set variance to default
00340   if (covariance.isZero()) {
00341     covariance(0,0) = parameter(_distance_variance, percept->info.class_id);
00342     covariance(1,1) = parameter(_distance_variance, percept->info.class_id);
00343     covariance(2,2) = parameter(_distance_variance, percept->info.class_id);
00344   }
00345 
00346   // project percept coordinates to map frame
00347   tf::StampedTransform cameraTransform;
00348   if (!_frame_id.empty() && tf.resolve(percept->header.frame_id) != tf.resolve(_frame_id)) {
00349     ROS_DEBUG("Transforming percept from %s frame to %s frame", percept->header.frame_id.c_str(), _frame_id.c_str());
00350 
00351     // retrieve camera transformation from tf
00352     try {
00353       tf.waitForTransform(_frame_id, percept->header.frame_id, percept->header.stamp, ros::Duration(1.0));
00354       tf.lookupTransform(_frame_id, percept->header.frame_id, percept->header.stamp, cameraTransform);
00355     } catch (tf::TransformException& ex) {
00356       ROS_ERROR("%s", ex.what());
00357       return;
00358     }
00359 
00360     pose = cameraTransform * pose;
00361 
00362     // rotate covariance matrix to map coordinates
00363     Eigen::Quaterniond eigen_rotation(cameraTransform.getRotation().w(), cameraTransform.getRotation().x(), cameraTransform.getRotation().y(), cameraTransform.getRotation().z());
00364     Eigen::Matrix3f rotation_map_camera(eigen_rotation.toRotationMatrix().cast<float>());
00365     covariance = rotation_map_camera * covariance * rotation_map_camera.transpose();
00366   }
00367   Eigen::Vector3f position(pose.getOrigin().x(), pose.getOrigin().y(), pose.getOrigin().z());
00368 
00369   // check height
00370   float relative_height = pose.getOrigin().z() - cameraTransform.getOrigin().z();
00371   if (relative_height < parameter(_min_height, percept->info.class_id) || relative_height > parameter(_max_height, percept->info.class_id)) {
00372     ROS_INFO("Discarding %s percept with height %f", percept->info.class_id.c_str(), relative_height);
00373     return;
00374   }
00375 
00376   // fix height (assume camera is always at 0.475m)
00377   // pose.setOrigin(tf::Point(pose.getOrigin().x(), pose.getOrigin().y(), pose.getOrigin().z() - cameraTransform.getOrigin().z() + 0.475f));
00378 
00379   // calculate observation support
00380   float support = 0.0;
00381   if (!percept->info.object_id.empty()) {
00382     support = percept->info.object_support;
00383   } else if (!percept->info.class_id.empty()) {
00384     support = percept->info.class_support + support_added_by_percept_verification;
00385   }
00386 
00387   if (support == 0.0) {
00388     ROS_WARN("Ignoring percept with support == 0.0");
00389     return;
00390   }
00391 
00392   // lock model
00393   model.lock();
00394 
00395   // find correspondence
00396   ObjectPtr object;
00397   if (percept->info.object_id.empty()) {
00398     model.getBestCorrespondence(object, position, covariance, percept->info.class_id, 1.0f);
00399   } else {
00400     object = model.getObject(percept->info.object_id);
00401   }
00402 
00403   if (object && object->getState() < 0) {
00404     ROS_DEBUG("Percept was associated to object %s, which has a fixed state", object->getObjectId().c_str());
00405     model.unlock();
00406     return;
00407   }
00408 
00409   // create new object
00410   if (!object) {
00411     object = model.add(percept->info.class_id, percept->info.object_id);
00412 
00413     object->setPosition(position);
00414     object->setCovariance(covariance);
00415     object->setSupport(support);
00416 
00417     ROS_INFO("Found new object %s of class %s at (%f,%f)!", object->getObjectId().c_str(), object->getClassId().c_str(), position.x(), position.y());
00418 
00419   // or update existing object
00420   } else if (support > 0.0) {
00421     //object->update(position, covariance, support);
00422     object->intersect(position, covariance, support);
00423 
00424   // or simply decrease support
00425   } else {
00426     object->addSupport(support);
00427   }
00428 
00429   // update object state
00430   if ((object->getState() == ObjectState::UNKNOWN || object->getState() == ObjectState::INACTIVE) &&  parameter(_pending_support, percept->info.class_id) > 0) {
00431     if (object->getSupport() >= parameter(_pending_support, percept->info.class_id) && (percept->header.stamp - object->getHeader().stamp).toSec() >= parameter(_pending_time, percept->info.class_id)) {
00432       object->setState(ObjectState::PENDING);
00433     }
00434   }
00435   if (object->getState() == ObjectState::PENDING &&  parameter(_active_support, percept->info.class_id) > 0) {
00436     if (object->getSupport() >= parameter(_active_support, percept->info.class_id) && (percept->header.stamp - object->getHeader().stamp).toSec() >= parameter(_active_time, percept->info.class_id)) {
00437       object->setState(ObjectState::ACTIVE);
00438     }
00439   }
00440 
00441   // set object orientation
00442   object->setOrientation(pose.getRotation());
00443 
00444   // update object header
00445   std_msgs::Header header;
00446   header.stamp    = percept->header.stamp;
00447   header.frame_id = _frame_id;
00448   object->setHeader(header);
00449 
00450   // update object name
00451   if (!percept->info.name.empty()) object->setName(percept->info.name);
00452 
00453   // unlock model
00454   model.unlock();
00455 
00456   // call object verification
00457   if (verificationServices.count("object") > 0) {
00458     hector_worldmodel_msgs::VerifyObject::Request request;
00459     hector_worldmodel_msgs::VerifyObject::Response response;
00460 
00461     object->getMessage(request.object);
00462 
00463     std::vector<VerificationService> services(verificationServices["object"]["*"]);
00464     if (!object->getClassId().empty()) {
00465       services.insert(services.end(), verificationServices["object"][object->getClassId()].begin(), verificationServices["object"][object->getClassId()].end());
00466     }
00467 
00468     for(std::vector<VerificationService>::iterator it = services.begin(); it != services.end(); ++it) {
00469       if (it->second.hasMember("ignore") && it->second["ignore"]) {
00470         ROS_DEBUG("Calling service %s for object %s, but ignoring its answer...", it->first.getService().c_str(), object->getObjectId().c_str());
00471         it->first.call(request, response);
00472 
00473       } else if (it->first.call(request, response)) {
00474         if (response.response == response.DISCARD) {
00475           ROS_DEBUG("Discarded object %s due to DISCARD message from service %s", object->getObjectId().c_str(), it->first.getService().c_str());
00476           object->setState(ObjectState::DISCARDED);
00477         }
00478         if (response.response == response.CONFIRM) {
00479           ROS_DEBUG("We got a CONFIRMation for object %s from service %s!", object->getObjectId().c_str(), it->first.getService().c_str());
00480           object->addSupport(100.0);
00481         }
00482         if (response.response == response.UNKNOWN) {
00483           ROS_DEBUG("Verification service %s cannot help us with object %s at the moment :-(", it->first.getService().c_str(), object->getObjectId().c_str());
00484         }
00485       } else if (it->second.hasMember("required") && it->second["required"]) {
00486         ROS_DEBUG("Discarded object %s as required service %s is not available", object->getObjectId().c_str(), it->first.getService().c_str());
00487         object->setState(ObjectState::DISCARDED);
00488       }
00489     }
00490   }
00491 
00492   // publish point in target frame for debugging purposes
00493   if (pointDebugPublisher.getNumSubscribers() > 0) {
00494     geometry_msgs::PointStamped point;
00495     geometry_msgs::Pose pose;
00496     object->getPose(pose);
00497     point.point = pose.position;
00498     point.header = object->getHeader();
00499     pointDebugPublisher.publish(point);
00500   }
00501 
00502   modelUpdatePublisher.publish(object->getMessage());
00503   publishModel();
00504 }
00505 
00506 ObjectTracker::CameraModelPtr ObjectTracker::getCameraModel(const std::string& frame_id, const sensor_msgs::CameraInfo& camera_info) {
00507   // retrieve camera model from either the cache or from CameraInfo given in the percept
00508   CameraModelPtr cameraModel;
00509   if (cameraModels.count(frame_id) == 0) {
00510     cameraModel.reset(new image_geometry::PinholeCameraModel());
00511     if (!cameraModel->fromCameraInfo(camera_info)) {
00512       ROS_ERROR("Could not initialize camera model from CameraInfo given in the percept");
00513       return CameraModelPtr();
00514     }
00515     cameraModels[frame_id] = cameraModel;
00516   } else {
00517     cameraModel = cameraModels[frame_id];
00518   }
00519 
00520   return cameraModel;
00521 }
00522 
00523 void ObjectTracker::objectAgeingCb(const std_msgs::Float32ConstPtr &ageing) {
00524   ROS_DEBUG("ageing of all objects by %f", ageing->data);
00525 
00526   // lock model
00527   model.lock();
00528 
00529   ObjectList objects = model.getObjects();
00530   
00531   for(ObjectModel::iterator it = objects.begin(); it != objects.end();) {
00532     ObjectPtr object = *it;
00533 
00534     // update support
00535     object->setSupport(object->getSupport() - ageing->data);
00536 
00537     // remove the object if the support is to low
00538     if (object->getSupport() < _ageing_threshold) {
00539       ROS_INFO("remove object %s with support %f", object->getObjectId().c_str(), object->getSupport());
00540       it = objects.erase(it);
00541       model.remove(object);
00542     } else {
00543       it++;
00544     }
00545   }
00546 
00547   // unlock model
00548   model.unlock();
00549   publishModel();
00550 }
00551 
00552 void ObjectTracker::modelUpdateCb(const hector_worldmodel_msgs::ObjectModelConstPtr &update)
00553 {
00554   for(hector_worldmodel_msgs::ObjectModel::_objects_type::const_iterator it = update->objects.begin(); it != update->objects.end(); ++it)
00555   {
00556     hector_worldmodel_msgs::AddObject object;
00557     object.request.map_to_next_obstacle = false;
00558     object.request.object = *it;
00559     if (!addObjectCb(object.request, object.response)) {
00560       ROS_WARN("Could not update object %s", it->info.object_id.c_str());
00561     }
00562   }
00563 }
00564 
00565 bool ObjectTracker::setObjectStateCb(hector_worldmodel_msgs::SetObjectState::Request& request, hector_worldmodel_msgs::SetObjectState::Response& response) {
00566   model.lock();
00567 
00568   ObjectPtr object = model.getObject(request.object_id);
00569   if (!object) {
00570     model.unlock();
00571     return false;
00572   }
00573 
00574   object->setState(request.new_state.state);
00575   modelUpdatePublisher.publish(object->getMessage());
00576 
00577   model.unlock();
00578   publishModel();
00579   return true;
00580 }
00581 
00582 bool ObjectTracker::setObjectNameCb(hector_worldmodel_msgs::SetObjectName::Request& request, hector_worldmodel_msgs::SetObjectName::Response& response) {
00583   model.lock();
00584 
00585   ObjectPtr object = model.getObject(request.object_id);
00586   if (!object) {
00587     model.unlock();
00588     return false;
00589   }
00590 
00591   object->setName(request.name);
00592   modelUpdatePublisher.publish(object->getMessage());
00593 
00594   model.unlock();
00595   publishModel();
00596   return true;
00597 }
00598 
00599 bool ObjectTracker::addObjectCb(hector_worldmodel_msgs::AddObject::Request& request, hector_worldmodel_msgs::AddObject::Response& response) {
00600   ObjectPtr object;
00601   bool newObject = false;
00602 
00603   // check if object already exist
00604   if (!request.object.info.object_id.empty()) {
00605     ROS_INFO("add_object service called for known %s object %s in frame %s", request.object.info.class_id.c_str(), request.object.info.object_id.c_str(), request.object.header.frame_id.c_str());
00606     object = model.getObject(request.object.info.object_id);
00607   } else {
00608     ROS_INFO("add_object service called for new %s object in frame %s", request.object.info.class_id.c_str(), request.object.header.frame_id.c_str());
00609   }
00610 
00611   // create a new object if object does not exist
00612   if (!object) {
00613     object.reset(new Object(request.object.info.class_id, request.object.info.object_id));
00614     newObject = true;
00615   }
00616 
00617   std_msgs::Header header = request.object.header;
00618   if (header.stamp.isZero()) header.stamp = ros::Time::now();
00619 
00620   geometry_msgs::PoseWithCovariance pose;
00621   if (request.map_to_next_obstacle) {
00622     pose.covariance = request.object.pose.covariance;
00623     if (!mapToNextObstacle(request.object.pose.pose, header, request.object.info, pose.pose)) {
00624       return false;
00625     }
00626   } else {
00627     pose = request.object.pose;
00628   }
00629 
00630   // if no variance is given, set variance to default
00631   if (pose.covariance[0] == 0  && pose.covariance[7] == 0  && pose.covariance[14] == 0 &&
00632       pose.covariance[21] == 0 && pose.covariance[28] == 0 && pose.covariance[35] == 0) {
00633     pose.covariance.assign(0.0);
00634     pose.covariance[0] = 1.0;
00635     pose.covariance[7] = 1.0;
00636     pose.covariance[14] = 1.0;
00637   }
00638 
00639   if (!transformPose(pose, pose, header)) return false;
00640 
00641   model.lock();
00642 
00643   object->setHeader(header);
00644   object->setPose(pose);
00645   object->setState(request.object.state.state);
00646   object->setSupport(request.object.info.support);
00647 
00648   if (newObject) model.add(object);
00649   object->getMessage(request.object);
00650   modelUpdatePublisher.publish(response.object);
00651 
00652   model.unlock();
00653 
00654   publishModel();
00655   return true;
00656 }
00657 
00658 bool ObjectTracker::getObjectModelCb(hector_worldmodel_msgs::GetObjectModel::Request& request, hector_worldmodel_msgs::GetObjectModel::Response& response) {
00659   getMergedModel().getMessage(response.model);
00660   return true;
00661 }
00662 
00663 void ObjectTracker::mergeModelCallback(const hector_worldmodel_msgs::ObjectModelConstPtr &new_model, const MergedModelPtr& info)
00664 {
00665   info->model = *new_model;
00666   publishModel();
00667 }
00668 
00669 void ObjectTracker::negativeUpdateCallback(const sensor_msgs::CameraInfoConstPtr &camera_info, const NegativeUpdatePtr& info)
00670 {
00671   model.lock();
00672 
00673   // retrieve camera model from either the cache or from CameraInfo given in the percept
00674   CameraModelPtr cameraModel = getCameraModel(camera_info->header.frame_id, *camera_info);
00675 
00676   // get camera transform
00677   tf::StampedTransform cameraInverseTransform;
00678   try {
00679     tf.waitForTransform(camera_info->header.frame_id, _frame_id, camera_info->header.stamp, ros::Duration(1.0));
00680     tf.lookupTransform(camera_info->header.frame_id, _frame_id, camera_info->header.stamp, cameraInverseTransform);
00681   } catch (tf::TransformException& ex) {
00682     ROS_ERROR("%s", ex.what());
00683     return;
00684   }
00685 
00686   ROS_DEBUG_NAMED("negative_update", "Doing negative update for frame_id %s", camera_info->header.frame_id.c_str());
00687 
00688   // iterate through objects
00689   for(ObjectModel::iterator it = model.begin(); it != model.end(); ++it) {
00690     const ObjectPtr& object = *it;
00691 
00692     // do not update objects with state < 0
00693     if (object->getState() < 0) continue;
00694 
00695     // check class_id
00696     if (!info->class_id.empty() && info->class_id != object->getClassId()) {
00697       ROS_DEBUG_NAMED("negative_update", "%s: wrong class_id %s", object->getObjectId().c_str(), object->getClassId().c_str());
00698       continue;
00699     }
00700 
00701     // check last seen stamp
00702     if (object->getStamp() >= camera_info->header.stamp - info->not_seen_duration) {
00703       ROS_DEBUG_NAMED("negative_update", "%s: seen %f seconds ago", object->getObjectId().c_str(), (camera_info->header.stamp - object->getStamp()).toSec());
00704       continue;
00705     }
00706 
00707     // transform object pose to camera coordinates
00708     tf::Pose pose;
00709     object->getPose(pose);
00710     pose = cameraInverseTransform * pose;
00711     cv::Point3d xyz(pose.getOrigin().x(), pose.getOrigin().y(), pose.getOrigin().z());
00712 
00713     // check distance
00714     float distance = pose.getOrigin().length();
00715     if (distance < info->min_distance || (info->max_distance > 0.0 && distance > info->max_distance)) {
00716       ROS_DEBUG_NAMED("negative_update", "%s: wrong distance: %f", object->getObjectId().c_str(), distance);
00717       continue;
00718     }
00719 
00720     // get image point
00721     cv::Point2d point = cameraModel->project3dToPixel(xyz);
00722 
00723     // check if object is within field of view
00724     if (!(point.x > 0 + info->ignore_border_pixels &&
00725           point.x < camera_info->width - info->ignore_border_pixels &&
00726           point.y > 0 + info->ignore_border_pixels &&
00727           point.y < camera_info->height - info->ignore_border_pixels)) {
00728       ROS_DEBUG_NAMED("negative_update", "%s: not within field of view (image coordinates (%f,%f))", object->getObjectId().c_str(), point.x, point.y);
00729       continue;
00730     }
00731 
00732     // ==> do negative update
00733     ROS_DEBUG("Doing negative update of %s. Should be at image coordinates (%f,%f).", object->getObjectId().c_str(), point.x, point.y);
00734     object->addSupport(-info->negative_support);
00735     if (object->getSupport() < info->min_support) object->setSupport(info->min_support);
00736     if (object->getSupport() <= parameter(_inactive_support, info->class_id)) {
00737       if (object->getState() == ObjectState::PENDING || object->getState() == ObjectState::ACTIVE)
00738         object->setState(ObjectState::INACTIVE);
00739     }
00740 
00741     // publish object update
00742     modelUpdatePublisher.publish(object->getMessage());
00743   }
00744 
00745   model.unlock();
00746   // publishModel();
00747 }
00748 
00749 bool ObjectTracker::mapToNextObstacle(const geometry_msgs::Pose& source, const std_msgs::Header &header, const ObjectInfo &info, geometry_msgs::Pose &mapped) {
00750   Parameters::load(info.class_id);
00751 
00752   if (!parameter(_distance_to_obstacle_service, info.class_id).exists()) {
00753     ROS_ERROR("Could not map object to next obstacle as the distance service %s is not available", parameter(_distance_to_obstacle_service, info.class_id).getService().c_str());
00754     return false;
00755   }
00756 
00757   // retrieve distance information
00758   float distance = parameter(_default_distance, info.class_id);
00759   hector_nav_msgs::GetDistanceToObstacle::Request request;
00760   hector_nav_msgs::GetDistanceToObstacle::Response response;
00761 
00762   // project image percept to the next obstacle
00763   request.point.header = header;
00764   request.point.point = source.position;
00765   // tf::pointTFToMsg(cameraTransform.getOrigin(), request.pose.pose.position);
00766   // tf::Quaternion direction_quaternion = tf::Quaternion(atan(direction.y/direction.x), atan(direction.z/direction.x), 0.0);
00767   // direction_quaternion *= cameraTransform.getRotation();
00768   // tf::quaternionTFToMsg(direction_quaternion, request.pose.pose.orientation);
00769   if (parameter(_distance_to_obstacle_service, info.class_id).call(request, response) && response.distance > 0.0) {
00770     // distance = std::max(response.distance - 0.1f, 0.0f);
00771     distance = std::max(response.distance, 0.0f);
00772   } else {
00773     ROS_DEBUG("Could not map object to next obstacle due to unknown or infinite distance");
00774     return false;
00775   }
00776 
00777   tf::Pose sourceTF;
00778   tf::poseMsgToTF(source, sourceTF);
00779   sourceTF.setOrigin(sourceTF.getOrigin().normalized() * distance);
00780   tf::poseTFToMsg(sourceTF, mapped);
00781 
00782   return true;
00783 }
00784 
00785 bool ObjectTracker::transformPose(const geometry_msgs::Pose& from, geometry_msgs::Pose &to, std_msgs::Header &header, tf::StampedTransform *transform_ptr) {
00786   // retrieve transformation from tf
00787   tf::StampedTransform transform;
00788   try {
00789     tf.waitForTransform(_frame_id, header.frame_id, header.stamp, ros::Duration(1.0));
00790     tf.lookupTransform(_frame_id, header.frame_id, header.stamp, transform);
00791   } catch (tf::TransformException& ex) {
00792     ROS_ERROR("%s", ex.what());
00793     return false;
00794   }
00795 
00796   tf::Pose tfPose;
00797   tf::poseMsgToTF(from, tfPose);
00798   tfPose = transform * tfPose;
00799   tf::poseTFToMsg(tfPose, to);
00800 
00801   header.frame_id = _frame_id;
00802   if (transform_ptr) *transform_ptr = transform;
00803 
00804   return true;
00805 }
00806 
00807 bool ObjectTracker::transformPose(const geometry_msgs::PoseWithCovariance& from, geometry_msgs::PoseWithCovariance &to, std_msgs::Header &header) {
00808   tf::StampedTransform transform;
00809 
00810   if (!transformPose(from.pose, to.pose, header, &transform)) return false;
00811 
00812   // TODO
00813   // rotate covariance matrix
00814 
00815   return true;
00816 }
00817 
00818 ObjectModel ObjectTracker::getMergedModel()
00819 {
00820   if (merged_models.size() == 0) return model;
00821 
00822   ROS_DEBUG("Merging object models from %lu sources.", merged_models.size());
00823 
00824   // merge with other models from merged_models
00825   ObjectModel merged(model);
00826   for(std::vector<MergedModelPtr>::iterator it = merged_models.begin(); it != merged_models.end(); ++it)
00827   {
00828     merged.mergeWith((*it)->model, tf, (*it)->prefix);
00829   }
00830 
00831   return merged;
00832 }
00833 
00834 void ObjectTracker::publishModelEvent(const ros::TimerEvent&) {
00835   publishModel();
00836 }
00837 
00838 void ObjectTracker::publishModel() {
00839   ObjectModel merged_model = getMergedModel();
00840 
00841   // Publish all model data on topic /objects
00842   modelPublisher.publish(merged_model.getMessage());
00843 
00844   // Visualize victims and covariance in rviz
00845   visualization_msgs::MarkerArray markers;
00846   merged_model.getVisualization(markers);
00847 //  drawings.setTime(ros::Time::now());
00848 //  model.lock();
00849 //  for(ObjectModel::iterator it = model.begin(); it != model.end(); ++it) {
00850 //    ObjectPtr object = *it;
00851 //    drawings.addMarkers(object->getVisualization());
00852 //    drawings.setColor(1.0, 0.0, 0.0, drawings.markerArray.markers.back().color.a);
00853 //    drawings.drawCovariance(Eigen::Vector2f(object->getPosition().x(), object->getPosition().y()), object->getCovariance().block<2,2>(0,0));
00854 //  }
00855 //  model.unlock();
00856   drawings.addMarkers(markers);
00857   drawings.sendAndResetData();
00858 }
00859 
00860 } // namespace hector_object_tracker
00861 
00862 
00863 int main(int argc, char **argv)
00864 {
00865   ros::init(argc, argv, "object_tracker");
00866 
00867   hector_object_tracker::ObjectTracker tracker;
00868   ros::spin();
00869 
00870   exit(0);
00871 }


hector_object_tracker
Author(s): Johannes Meyer
autogenerated on Mon Oct 6 2014 00:36:54