#include <stdexcept>

#include <boost/bind.hpp>
#include <boost/filesystem/fstream.hpp>
#include <boost/format.hpp>
#include <boost/scope_exit.hpp>
#include <boost/version.hpp>

#include <dynamic_reconfigure/server.h>
#include <geometry_msgs/PoseWithCovarianceStamped.h>
#include <image_proc/advertisement_checker.h>
#include <image_transport/image_transport.h>
#include <ros/param.h>
#include <ros/ros.h>
#include <ros/transport_hints.h>
#include <sensor_msgs/Image.h>
#include <std_msgs/String.h>
#include <tf/transform_broadcaster.h>

#include <visp/vpCameraParameters.h>
#include <visp/vpExponentialMap.h>
#include <visp/vpImage.h>
#include <visp/vpImageConvert.h>

#include "conversion.hh"
#include "callbacks.hh"
#include "file.hh"
#include "names.hh"

#include "tracker.hh"

namespace visp_tracker
{
  bool
  Tracker::initCallback(visp_tracker::Init::Request& req,
                        visp_tracker::Init::Response& res)
  {
    ROS_INFO("Initialization request received.");

    res.initialization_succeed = false;

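    // If anything below fails, this scope-exit guard resets the tracker and
    // puts the node back into the waiting state before the response is sent.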
    BOOST_SCOPE_EXIT((&res)(tracker_)(&state_)
                     (&lastTrackedImage_)(&trackerType_))
    {
      if (!res.initialization_succeed)
      {
        tracker_->resetTracker();
        state_ = WAITING_FOR_INITIALIZATION;
        lastTrackedImage_ = 0;
      }
    } BOOST_SCOPE_EXIT_END;

    std::string fullModelPath;
    boost::filesystem::ofstream modelStream;

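    // Dump the model description to a file on disk so that ViSP can load it
    // from a path below (see makeModelFile).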
    if (!makeModelFile(modelStream, fullModelPath))
      return true;

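    // Load the moving-edge and KLT settings from the request and mirror them
    // in the dynamic_reconfigure server so both stay consistent.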
    vpMe movingEdge;
    visp_tracker::ModelBasedSettingsConfig config;
    tracker_->resetTracker();

    if (trackerType_ != "klt")
    {
      convertInitRequestToVpMe(req, tracker_, movingEdge);
      convertVpMeToModelBasedSettingsConfig(movingEdge, tracker_, config);
      reconfigureSrv_.updateConfig(config);
    }

    vpKltOpencv klt;
    if (trackerType_ != "mbt")
    {
      convertInitRequestToVpKltOpencv(req, tracker_, klt);
      convertVpKltOpencvToModelBasedSettingsConfig(klt, tracker_, config);
      reconfigureSrv_.updateConfig(config);
    }

    state_ = WAITING_FOR_INITIALIZATION;
    lastTrackedImage_ = 0;

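    // Load the model. On failure the scope-exit guard above performs the
    // cleanup; returning true still sends the (failed) response.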
    try
    {
      ROS_DEBUG_STREAM("Trying to load the model: " << fullModelPath);
      tracker_->loadModel(fullModelPath.c_str());
      modelStream.close();
    }
    catch(...)
    {
      ROS_ERROR_STREAM("Failed to load the model: " << fullModelPath);
      return true;
    }
    ROS_DEBUG("Model has been successfully loaded.");

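    // Convert the initial pose from the request and initialize the tracker
    // from it; covariance computation is enabled so the pose uncertainty can
    // be published while tracking.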
    transformToVpHomogeneousMatrix(cMo_, req.initial_cMo);

    tracker_->setCovarianceComputation(true);

    ROS_INFO_STREAM("Initializing tracker with cMo:\n" << cMo_);
    try
    {
      tracker_->initFromPose(image_, cMo_);
      ROS_INFO("Tracker successfully initialized.");

      movingEdge.print();
    }
    catch(const std::string& str)
    {
      ROS_ERROR_STREAM("Tracker initialization has failed: " << str);
    }

    res.initialization_succeed = true;
    state_ = TRACKING;
    return true;
  }

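  // Copy the moving-edge sites currently used by the edge tracker into the
  // message, so that a client node can display them.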
  void
  Tracker::updateMovingEdgeSites(visp_tracker::MovingEdgeSitesPtr sites)
  {
    if (!sites)
      return;

    std::list<vpMbtDistanceLine*> linesList;

    if (trackerType_ != "klt")
      dynamic_cast<vpMbEdgeTracker*>(tracker_)->getLline(linesList, 0);

    std::list<vpMbtDistanceLine*>::iterator linesIterator =
      linesList.begin();
    if (linesList.empty())
      ROS_DEBUG_THROTTLE(10, "no distance lines");
    bool noVisibleLine = true;
    for (; linesIterator != linesList.end(); ++linesIterator)
    {
      vpMbtDistanceLine* line = *linesIterator;

      if (line && line->isVisible() && line->meline)
      {
        std::list<vpMeSite>::const_iterator sitesIterator =
          line->meline->list.begin();
        if (line->meline->list.empty())
          ROS_DEBUG_THROTTLE(10, "no moving edge for a line");
        for (; sitesIterator != line->meline->list.end(); ++sitesIterator)
        {
          visp_tracker::MovingEdgeSite movingEdgeSite;
          movingEdgeSite.x = sitesIterator->ifloat;
          movingEdgeSite.y = sitesIterator->jfloat;
          movingEdgeSite.suppress = sitesIterator->suppress;
          sites->moving_edge_sites.push_back(movingEdgeSite);
        }
        noVisibleLine = false;
      }
    }
    if (noVisibleLine)
      ROS_DEBUG_THROTTLE(10, "no visible line");
  }

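  // Copy the KLT points currently tracked on each face of the model into the
  // message; faces providing fewer than four points are skipped.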
  void
  Tracker::updateKltPoints(visp_tracker::KltPointsPtr klt)
  {
    if (!klt)
      return;

    vpMbHiddenFaces<vpMbtKltPolygon>* poly_lst;
    std::map<int, vpImagePoint>* map_klt;

    if (trackerType_ != "mbt")
    {
      poly_lst = &dynamic_cast<vpMbKltTracker*>(tracker_)->getFaces();

      for (unsigned int i = 0; i < poly_lst->size(); i++)
      {
        if ((*poly_lst)[i])
        {
          map_klt = &((*poly_lst)[i]->getCurrentPoints());

          if (map_klt->size() > 3)
          {
            for (std::map<int, vpImagePoint>::iterator it = map_klt->begin();
                 it != map_klt->end(); ++it)
            {
              visp_tracker::KltPoint kltPoint;
              kltPoint.id = it->first;
              kltPoint.i = it->second.get_i();
              kltPoint.j = it->second.get_j();
              klt->klt_points_positions.push_back(kltPoint);
            }
          }
        }
      }
    }
  }

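  // Use image_proc's advertisement checker to warn the user when the
  // rectified image topic does not seem to be advertised.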
  void Tracker::checkInputs()
  {
    ros::V_string topics;
    topics.push_back(rectifiedImageTopic_);
    checkInputs_.start(topics, 60.0);
  }

  Tracker::Tracker(ros::NodeHandle& nh,
                   ros::NodeHandle& privateNh,
                   volatile bool& exiting,
                   unsigned queueSize)
    : exiting_(exiting),
      queueSize_(queueSize),
      nodeHandle_(nh),
      nodeHandlePrivate_(privateNh),
      imageTransport_(nodeHandle_),
      state_(WAITING_FOR_INITIALIZATION),
      image_(),
      cameraPrefix_(),
      rectifiedImageTopic_(),
      cameraInfoTopic_(),
      vrmlPath_(),
      cameraSubscriber_(),
      mutex_(),
      reconfigureSrv_(mutex_, nodeHandlePrivate_),
      resultPublisher_(),
      transformationPublisher_(),
      movingEdgeSitesPublisher_(),
      kltPointsPublisher_(),
      initService_(),
      header_(),
      info_(),
      kltTracker_(),
      movingEdge_(),
      cameraParameters_(),
      lastTrackedImage_(),
      checkInputs_(nodeHandle_, ros::this_node::getName()),
      cMo_(),
      listener_(),
      worldFrameId_(),
      compensateRobotMotion_(false),
      transformBroadcaster_(),
      childFrameId_(),
      objectPositionHintSubscriber_(),
      objectPositionHint_()
  {
    cMo_.eye();

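    // Retrieve the node parameters and instantiate the requested tracker
    // variant: moving edges ("mbt"), KLT ("klt"), or the hybrid edge+KLT
    // tracker otherwise.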
    nodeHandlePrivate_.param<std::string>("camera_prefix", cameraPrefix_, "");
    nodeHandlePrivate_.param<std::string>("tracker_type", trackerType_, "mbt");
    if (trackerType_ == "mbt")
      tracker_ = new vpMbEdgeTracker();
    else if (trackerType_ == "klt")
      tracker_ = new vpMbKltTracker();
    else
      tracker_ = new vpMbEdgeKltTracker();

    if (cameraPrefix_.empty())
    {
      ROS_FATAL
        ("The camera_prefix parameter is not set.\n"
         "Please relaunch the tracker while setting this parameter, i.e.\n"
         "$ rosrun visp_tracker tracker _camera_prefix:=/my/camera");
      ros::shutdown();
      return;
    }
    nodeHandle_.setParam("camera_prefix", cameraPrefix_);

    nodeHandle_.param<std::string>("frame_id", childFrameId_, "object_position");

    nodeHandle_.param<std::string>("world_frame_id", worldFrameId_, "/odom");
    nodeHandle_.param<bool>
      ("compensate_robot_motion", compensateRobotMotion_, false);

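    // Resolve the rectified image topic from the camera prefix and warn if
    // it does not appear to be published.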
    rectifiedImageTopic_ =
      ros::names::resolve(cameraPrefix_ + "/image_rect");

    checkInputs();

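    // Advertise the output topics: object pose with covariance, object
    // transform, moving-edge sites and KLT points.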
    resultPublisher_ =
      nodeHandle_.advertise<geometry_msgs::PoseWithCovarianceStamped>
      (visp_tracker::object_position_covariance_topic, queueSize_);

    transformationPublisher_ =
      nodeHandle_.advertise<geometry_msgs::TransformStamped>
      (visp_tracker::object_position_topic, queueSize_);

    movingEdgeSitesPublisher_ =
      nodeHandle_.advertise<visp_tracker::MovingEdgeSites>
      (visp_tracker::moving_edge_sites_topic, queueSize_);

    kltPointsPublisher_ =
      nodeHandle_.advertise<visp_tracker::KltPoints>
      (visp_tracker::klt_points_topic, queueSize_);

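    // Subscribe to the camera; the callback keeps the latest image, header
    // and camera info in the members used by the tracking loop.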
    cameraSubscriber_ =
      imageTransport_.subscribeCamera
      (rectifiedImageTopic_, queueSize_,
       bindImageCallback(image_, header_, info_));

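    // Subscribe to the object position hint, used by spin() to re-initialize
    // the pose when tracking is lost.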
    typedef boost::function<
      void (const geometry_msgs::TransformStampedConstPtr&)>
      objectPositionHintCallback_t;
    objectPositionHintCallback_t callback =
      boost::bind(&Tracker::objectPositionHintCallback, this, _1);
    objectPositionHintSubscriber_ =
      nodeHandle_.subscribe<geometry_msgs::TransformStamped>
      ("object_position_hint", queueSize_, callback);

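    // Hand the default moving-edge and KLT settings to the tracker variants
    // that use them.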
    movingEdge_.initMask();
    if (trackerType_ != "klt")
    {
      vpMbEdgeTracker* t = dynamic_cast<vpMbEdgeTracker*>(tracker_);
      t->setMovingEdge(movingEdge_);
    }

    if (trackerType_ != "mbt")
    {
      vpMbKltTracker* t = dynamic_cast<vpMbKltTracker*>(tracker_);
      t->setKltOpencv(kltTracker_);
    }

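    // Expose the tracking parameters through dynamic_reconfigure.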
    reconfigureSrv_t::CallbackType f =
      boost::bind(&reconfigureCallback, boost::ref(tracker_),
                  boost::ref(image_), boost::ref(movingEdge_),
                  boost::ref(kltTracker_), boost::ref(trackerType_),
                  boost::ref(mutex_), _1, _2);
    reconfigureSrv_.setCallback(f);

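    // Block until the first image arrives: the camera parameters are needed
    // before the tracker can be configured.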
    waitForImage();
    if (this->exiting())
      return;
    if (!image_.getWidth() || !image_.getHeight())
      throw std::runtime_error("failed to retrieve image");

    initializeVpCameraFromCameraInfo(cameraParameters_, info_);

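    // Sanity-check the intrinsics: values of exactly 0. or 1. usually
    // indicate a missing or wrong calibration.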
    if (cameraParameters_.get_px() == 0.
        || cameraParameters_.get_px() == 1.
        || cameraParameters_.get_py() == 0.
        || cameraParameters_.get_py() == 1.
        || cameraParameters_.get_u0() == 0.
        || cameraParameters_.get_u0() == 1.
        || cameraParameters_.get_v0() == 0.
        || cameraParameters_.get_v0() == 1.)
      ROS_WARN("Dubious camera parameters detected.\n"
               "\n"
               "It seems that the matrix P from your camera\n"
               "calibration topics is wrong.\n"
               "The tracker will continue anyway, but you\n"
               "should double check your calibration data,\n"
               "especially if the model re-projection fails.\n"
               "\n"
               "This warning is triggered if px, py, u0 or v0\n"
               "is set to 0. or 1. (exactly).");

    tracker_->setCameraParameters(cameraParameters_);
    tracker_->setDisplayFeatures(false);

    ROS_INFO_STREAM(cameraParameters_);

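    // Finally, advertise the initialization service through which the client
    // provides the initial pose and the tracker settings.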
    initCallback_t initCallback =
      boost::bind(&Tracker::initCallback, this, _1, _2);

    initService_ = nodeHandle_.advertiseService
      (visp_tracker::init_service, initCallback);
  }

  Tracker::~Tracker()
  {
    delete tracker_;
  }

  void Tracker::spin()
  {
    ros::Rate loopRateTracking(100);
    tf::Transform transform;
    std_msgs::Header lastHeader;
    while (!exiting())
    {
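      // If the sequence number went backwards (e.g. a bag file was
      // restarted), reset the last tracked image so tracking resumes on the
      // new sequence.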
      if (header_.seq < lastHeader.seq)
        lastTrackedImage_ = 0;

      if (lastTrackedImage_ < header_.seq)
      {
        lastTrackedImage_ = header_.seq;

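        // If robot motion compensation is enabled, propagate the object pose
        // with the camera displacement between the previous and the current
        // image, as reported by tf.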
        if (compensateRobotMotion_)
          try
          {
            tf::StampedTransform stampedTransform;
            listener_.lookupTransform
              (header_.frame_id,      // target frame
               header_.stamp,         // target time
               header_.frame_id,      // source frame
               lastHeader.stamp,      // source time
               worldFrameId_,         // fixed frame
               stampedTransform);
            vpHomogeneousMatrix newMold;
            transformToVpHomogeneousMatrix(newMold, stampedTransform);
            cMo_ = newMold * cMo_;
          }
          catch(tf::TransformException& e)
          {
            // If the transform is unavailable, keep the previous pose.
          }

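        // When tracking is lost, fall back on the object position hint,
        // provided it is less than one second old.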
        if (state_ == LOST)
        {
          if (ros::Time::now() - objectPositionHint_.header.stamp
              < ros::Duration(1.))
            transformToVpHomogeneousMatrix
              (cMo_, objectPositionHint_.transform);
        }

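        // Run one tracking iteration while holding the mutex shared with the
        // dynamic_reconfigure callback; any exception marks the tracker as
        // lost.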
        if (state_ == TRACKING || state_ == LOST)
          try
          {
            mutex_.lock();
            tracker_->setPose(image_, cMo_);
            tracker_->track(image_);
            tracker_->getPose(cMo_);
            mutex_.unlock();
          }
          catch(...)
          {
            mutex_.unlock();
            ROS_WARN_THROTTLE(10, "tracking lost");
            state_ = LOST;
          }

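        // Publish the estimated pose on every output that currently has
        // subscribers.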
        if (state_ == TRACKING)
        {
          geometry_msgs::Transform transformMsg;
          vpHomogeneousMatrixToTransform(transformMsg, cMo_);

          if (transformationPublisher_.getNumSubscribers() > 0)
          {
            geometry_msgs::TransformStampedPtr objectPosition
              (new geometry_msgs::TransformStamped);
            objectPosition->header = header_;
            objectPosition->child_frame_id = childFrameId_;
            objectPosition->transform = transformMsg;
            transformationPublisher_.publish(objectPosition);
          }

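          // Pose with covariance: the 6x6 covariance matrix estimated by
          // ViSP is copied row by row into the flat 36-element array of the
          // message.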
          if (resultPublisher_.getNumSubscribers() > 0)
          {
            geometry_msgs::PoseWithCovarianceStampedPtr result
              (new geometry_msgs::PoseWithCovarianceStamped);
            result->header = header_;
            result->pose.pose.position.x = transformMsg.translation.x;
            result->pose.pose.position.y = transformMsg.translation.y;
            result->pose.pose.position.z = transformMsg.translation.z;

            result->pose.pose.orientation.x = transformMsg.rotation.x;
            result->pose.pose.orientation.y = transformMsg.rotation.y;
            result->pose.pose.orientation.z = transformMsg.rotation.z;
            result->pose.pose.orientation.w = transformMsg.rotation.w;

            const vpMatrix& covariance =
              tracker_->getCovarianceMatrix();
            for (unsigned i = 0; i < covariance.getRows(); ++i)
              for (unsigned j = 0; j < covariance.getCols(); ++j)
              {
                unsigned idx = i * covariance.getCols() + j;
                if (idx >= 36)
                  continue;
                result->pose.covariance[idx] = covariance[i][j];
              }
            resultPublisher_.publish(result);
          }

          if (movingEdgeSitesPublisher_.getNumSubscribers() > 0)
          {
            visp_tracker::MovingEdgeSitesPtr sites
              (new visp_tracker::MovingEdgeSites);
            updateMovingEdgeSites(sites);
            sites->header = header_;
            movingEdgeSitesPublisher_.publish(sites);
          }

          if (kltPointsPublisher_.getNumSubscribers() > 0)
          {
            visp_tracker::KltPointsPtr klt
              (new visp_tracker::KltPoints);
            updateKltPoints(klt);
            klt->header = header_;
            kltPointsPublisher_.publish(klt);
          }

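          // Broadcast the same pose on tf, from the camera frame to
          // childFrameId_.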
          transform.setOrigin
            (tf::Vector3(transformMsg.translation.x,
                         transformMsg.translation.y,
                         transformMsg.translation.z));
          transform.setRotation
            (tf::Quaternion
             (transformMsg.rotation.x,
              transformMsg.rotation.y,
              transformMsg.rotation.z,
              transformMsg.rotation.w));
          transformBroadcaster_.sendTransform
            (tf::StampedTransform
             (transform,
              header_.stamp,
              header_.frame_id,
              childFrameId_));
        }
      }
      lastHeader = header_;
      spinOnce();
      loopRateTracking.sleep();
    }
  }

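  // Spin at 10 Hz until image data starts arriving on the camera topic, or
  // until the node is asked to exit.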
  void
  Tracker::waitForImage()
  {
    ros::Rate loop_rate(10);
    while (!exiting()
           && (!image_.getWidth() || !image_.getHeight())
           && (!info_ || info_->K[0] == 0.))
    {
      ROS_INFO_THROTTLE(1, "waiting for a rectified image...");
      spinOnce();
      loop_rate.sleep();
    }
  }

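  // Store the latest object position hint; it is consumed by spin() when
  // tracking is lost.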
  void
  Tracker::objectPositionHintCallback
  (const geometry_msgs::TransformStampedConstPtr& transform)
  {
    objectPositionHint_ = *transform;
  }

} // namespace visp_tracker