tracking.cpp
00001 #include <ros/ros.h>
00002 
00003 #include "tracking.h"
00004 #include "tools.h"
00005 
00006 using namespace tools;
00007 
00008 namespace slam
00009 {
00010 
00011   Tracking::Tracking(Publisher *f_pub, Graph *graph)
00012     : f_pub_(f_pub), graph_(graph), frame_id_(0), kf_id_(0), jump_detected_(false), secs_to_filter_(10.0)
00013   {}
00014 
00015   void Tracking::run()
00016   {
00017     // Init
00018     state_ = NOT_INITIALIZED;
00019 
00020     ros::NodeHandle nh;
00021     ros::NodeHandle nhp("~");
00022 
00023     pose_pub_ = nhp.advertise<nav_msgs::Odometry>("odometry", 1);
00024     pc_pub_ = nhp.advertise<sensor_msgs::PointCloud2>("pointcloud", 5);
00025     overlapping_pub_ = nhp.advertise<sensor_msgs::Image>("tracking_overlap", 1, true);
00026 
00027     image_transport::ImageTransport it(nh);
00028 
00029     image_transport::SubscriberFilter left_sub, right_sub;
00030     message_filters::Subscriber<sensor_msgs::CameraInfo> left_info_sub, right_info_sub;
00031     message_filters::Subscriber<nav_msgs::Odometry> odom_sub;
00032     message_filters::Subscriber<sensor_msgs::Range> range_sub;
00033     message_filters::Subscriber<sensor_msgs::PointCloud2> cloud_sub;
00034 
00035     // Message sync
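          // All seven topics (odometry, range, stereo pair, camera infos and pointcloud)
          // are delivered together by the synchronizer, so each call to msgsCallback
          // receives a time-consistent set of messages.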
00036     boost::shared_ptr<Sync> sync;
00037     odom_sub      .subscribe(nh, params_.odom_topic, 20);
00038     range_sub     .subscribe(nh, params_.range_topic, 20);
00039     left_sub      .subscribe(it, params_.camera_topic+"/left/image_rect_color", 5);
00040     right_sub     .subscribe(it, params_.camera_topic+"/right/image_rect_color", 5);
00041     left_info_sub .subscribe(nh, params_.camera_topic+"/left/camera_info",  5);
00042     right_info_sub.subscribe(nh, params_.camera_topic+"/right/camera_info", 5);
00043     cloud_sub     .subscribe(nh, params_.camera_topic+"/points2", 5);
00044     sync.reset(new Sync(SyncPolicy(10), odom_sub, range_sub, left_sub, right_sub, left_info_sub, right_info_sub, cloud_sub) );
00045     sync->registerCallback(bind(&Tracking::msgsCallback, this, _1, _2, _3, _4, _5, _6, _7));
00046 
00047     // Create directory to store the keyframes
00048     string keyframes_dir = WORKING_DIRECTORY + "keyframes";
00049     if (fs::is_directory(keyframes_dir))
00050       fs::remove_all(keyframes_dir);
00051     fs::path dir1(keyframes_dir);
00052     if (!fs::create_directory(dir1))
00053       ROS_ERROR("[Localization:] Unable to create the keyframes directory.");
00054 
00055     // Create the directory to store the pointclouds
00056     string pointclouds_dir = WORKING_DIRECTORY + "pointclouds";
00057     if (fs::is_directory(pointclouds_dir))
00058       fs::remove_all(pointclouds_dir);
00059     fs::path dir2(pointclouds_dir);
00060     if (!fs::create_directory(dir2))
00061       ROS_ERROR("[Localization:] Unable to create the pointclouds directory.");
00062 
00063     ros::spin();
00064   }
00065 
00066   void Tracking::msgsCallback(
00067       const nav_msgs::Odometry::ConstPtr& odom_msg,
00068       const sensor_msgs::Range::ConstPtr& range_msg,
00069       const sensor_msgs::ImageConstPtr& l_img_msg,
00070       const sensor_msgs::ImageConstPtr& r_img_msg,
00071       const sensor_msgs::CameraInfoConstPtr& l_info_msg,
00072       const sensor_msgs::CameraInfoConstPtr& r_info_msg,
00073       const sensor_msgs::PointCloud2ConstPtr& cloud_msg)
00074   {
00075 
00076     tf::Transform c_odom_robot = Tools::odomTotf(*odom_msg);
00077     double timestamp = l_img_msg->header.stamp.toSec();
00078 
00079     cv::Mat l_img, r_img;
00080     Tools::imgMsgToMat(*l_img_msg, *r_img_msg, l_img, r_img);
00081 
00082     PointCloudRGB::Ptr pcl_cloud(new PointCloudRGB);
00083     fromROSMsg(*cloud_msg, *pcl_cloud);
00084 
00085     if (state_ == NOT_INITIALIZED)
00086     {
00087       // Transformation between odometry and camera
00088       if (!getOdom2CameraTf(*odom_msg, *l_img_msg, odom2camera_))
00089       {
00090         ROS_WARN("[Localization:] Unable to transform odometry to camera frame.");
00091         return;
00092       }
00093 
00094       // Camera parameters
00095       Tools::getCameraModel(*l_info_msg, *r_info_msg, camera_model_, camera_matrix_);
00096 
00097       // Set graph properties
00098       graph_->setCamera2Odom(odom2camera_.inverse());
00099       graph_->setCameraMatrix(camera_matrix_);
00100       graph_->setCameraModel(camera_model_.left());
00101 
00102       // The initial frame
00103       c_frame_ = Frame(l_img, r_img, camera_model_, timestamp);
00104 
00105       // Filter cloud
00106       PointCloudRGB::Ptr cloud_filtered(new PointCloudRGB);
00107       cloud_filtered = filterCloud(pcl_cloud);
00108       c_frame_.setPointCloud(cloud_filtered);
00109 
00110       // No corrections applied yet
00111       prev_robot_pose_ = c_odom_robot;
00112 
00113       // For the first frame, its estimated pose will coincide with odometry
00114       tf::Transform c_odom_camera = c_odom_robot * odom2camera_;
00115       c_frame_.setCameraPose(c_odom_camera);
00116 
00117       bool frame_ok = addFrameToMap(range_msg->range);
00118       if (frame_ok)
00119       {
00120         // Store the odometry for this frame
00121         odom_pose_history_.push_back(c_odom_camera);
00122 
00123         // Move to the INITIALIZING state
00124         state_ = INITIALIZING;
00125       }
00126     }
00127     else
00128     {
00129       // The current frame
00130       c_frame_ = Frame(l_img, r_img, camera_model_, timestamp);
00131 
00132       // Get the pose of the last frame added to the graph
00133       tf::Transform last_frame_pose;
00134       bool graph_ready = graph_->getFramePose(frame_id_ - 1, last_frame_pose);
00135       if (!graph_ready) return;
00136 
00137       // Previous/current frame odometry difference
00138       tf::Transform c_camera_odom_pose = c_odom_robot * odom2camera_;
00139       tf::Transform odom_diff = odom_pose_history_[odom_pose_history_.size()-1].inverse() * c_camera_odom_pose;
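            // odom_diff is the camera motion predicted by the robot odometry since the
            // last keyframe; it is used below as a sanity reference and as a fallback
            // when the visual refinement is not available or not trusted.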
00140 
00141       // Refine the current pose relative to the previous frame
00142       tf::Transform correction;
00143       tf::Transform c_camera_pose;
00144       int num_inliers = LC_MIN_INLIERS;
00145       if (params_.refine)
00146       {
00147         tf::Transform p2c_diff;
00148         bool succeed = refinePose(p_frame_, c_frame_, p2c_diff, num_inliers);
00149         double error = Tools::poseDiff3D(p2c_diff, odom_diff);
00150         bool refine_valid = succeed && error < 0.2;
00151 
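              // Accept the visual estimate only if it succeeded and stays within 0.2
              // (as measured by Tools::poseDiff3D) of the odometry prediction;
              // otherwise fall back to the odometry increment.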
00152         if (refine_valid)
00153           correction = p2c_diff;
00154         else
00155           correction = odom_diff;
00156       }
00157       else
00158       {
00159         correction = odom_diff;
00160       }
00161       c_camera_pose = last_frame_pose * correction;
00162 
00163       // Filter cloud
00164       PointCloudRGB::Ptr cloud_filtered(new PointCloudRGB);
00165       cloud_filtered = filterCloud(pcl_cloud);
00166 
00167       // Set frame data
00168       c_frame_.setCameraPose(c_camera_pose);
00169       c_frame_.setInliersNumWithPreviousFrame(num_inliers);
00170       c_frame_.setPointCloud(cloud_filtered);
00171 
00172       // Check whether a new keyframe is needed
00173       bool is_new_keyframe = needNewKeyFrame(range_msg->range);
00174       if (is_new_keyframe)
00175       {
00176         // Store the camera odometry for this keyframe
00177         tf::Transform c_odom_camera = c_odom_robot * odom2camera_;
00178         odom_pose_history_.push_back(c_odom_camera);
00179       }
00180     }
00181 
00182     // Convert camera to robot pose
00183     tf::Transform robot_pose = c_frame_.getCameraPose() * odom2camera_.inverse();
00184 
00185     // Detect a big jump
00186     double jump = Tools::poseDiff3D(robot_pose, prev_robot_pose_);
00187     if (!jump_detected_ && jump > 0.8)
00188     {
00189       jump_time_ = ros::WallTime::now();
00190       jump_detected_ = true;
00191     }
00192     if (jump_detected_ && (ros::WallTime::now().toSec() - jump_time_.toSec() > secs_to_filter_))
00193     {
00194       jump_detected_ = false;
00195     }
00196 
00197     tf::Transform pose = robot_pose;
00198     if (jump_detected_)
00199     {
00200       // Filter big jumps
00201       double m = 1/(10*secs_to_filter_);
00202       double factor = m * (ros::WallTime::now().toSec() - jump_time_.toSec());
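            // With m = 1/(10*secs_to_filter_), factor ramps linearly from 0 to 0.1 over
            // the filtering window, so the published position stays close to the previous
            // (already filtered) pose and the detected jump is blended in gradually.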
00203 
00204       double c_x = pose.getOrigin().x();
00205       double c_y = pose.getOrigin().y();
00206       double c_z = pose.getOrigin().z();
00207 
00208       double p_x = prev_robot_pose_.getOrigin().x();
00209       double p_y = prev_robot_pose_.getOrigin().y();
00210       double p_z = prev_robot_pose_.getOrigin().z();
00211 
00212       double x = factor * c_x + (1-factor) * p_x;
00213       double y = factor * c_y + (1-factor) * p_y;
00214       double z = factor * c_z + (1-factor) * p_z;
00215 
00216       tf::Vector3 filtered_pose(x, y, z);
00217       pose.setOrigin(filtered_pose);
00218     }
00219 
00220     // Publish
00221     nav_msgs::Odometry pose_msg = *odom_msg;
00222     tf::poseTFToMsg(pose, pose_msg.pose.pose);
00223     pose_pub_.publish(pose_msg);
00224     tf_broadcaster_.sendTransform(tf::StampedTransform(pose, odom_msg->header.stamp, "map", odom_msg->child_frame_id));
00225 
00226     // Store
00227     prev_robot_pose_ = pose;
00228   }
00229 
00230   bool Tracking::getOdom2CameraTf(nav_msgs::Odometry odom_msg,
00231       sensor_msgs::Image img_msg,
00232       tf::StampedTransform &transform)
00233   {
00234     // Init the transform
00235     transform.setIdentity();
00236 
00237     try
00238     {
00239       // Extract the transform
00240       tf_listener_.lookupTransform(odom_msg.child_frame_id,
00241           img_msg.header.frame_id,
00242           ros::Time(0),
00243           transform);
00244     }
00245     catch (const tf::TransformException& ex)
00246     {
00247       ROS_WARN("%s", ex.what());
00248       return false;
00249     }
00250     return true;
00251   }
00252 
00253   bool Tracking::needNewKeyFrame(double range)
00254   {
00255     // Finish the initialization: try to add a second frame and switch to WORKING
00256     if (state_ == INITIALIZING)
00257     {
00258       bool valid_frame = addFrameToMap(range);
00259       if (valid_frame)
00260         state_ = WORKING;
00261       return valid_frame;
00262     }
00263     else
00264     {
00265       // Compute overlap to decide if new keyframe is needed.
00266 
00267       // The transformation between last and current keyframe
00268       tf::Transform last_2_current = p_frame_.getCameraPose().inverse() * c_frame_.getCameraPose();
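            // Applying last_2_current to points of the current frame expresses them in
            // the last keyframe's camera frame, where they can be cropped against the
            // stored bounding box (last_min_pt_/last_max_pt_).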
00269 
00270       // Speed up the process by converting the current pointcloud to XYZ
00271       PointCloudXYZ::Ptr cloud_xyz(new PointCloudXYZ);
00272       pcl::copyPointCloud(*c_frame_.getPointCloud(), *cloud_xyz);
00273 
00274       // Transform the current pointcloud
00275       Eigen::Affine3d tf_eigen;
00276       transformTFToEigen(last_2_current, tf_eigen);
00277       PointCloudXYZ::Ptr cloud_xyz_moved(new PointCloudXYZ);
00278       pcl::transformPointCloud(*cloud_xyz, *cloud_xyz_moved, tf_eigen);
00279 
00280       // Remove the points that fall outside the last keyframe's bounding box
00281       PointCloudXYZ::Ptr output(new PointCloudXYZ);
00282       pcl::CropBox<PointXYZ> crop_filter;
00283       crop_filter.setInputCloud(cloud_xyz_moved);
00284       crop_filter.setMin(last_min_pt_);
00285       crop_filter.setMax(last_max_pt_);
00286       crop_filter.filter(*output);
00287 
00288       // The overlap estimation
00289       float overlap = output->points.size() * 100 / cloud_xyz_moved->points.size();
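            // Note: both sizes are integers, so the percentage is truncated towards zero.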
00290 
00291       // Safety factor
00292       overlap = (0.002 * overlap + 0.8) * overlap;
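            // The quadratic mapping leaves 100% unchanged and shrinks lower values
            // (e.g. 50% becomes 45%), so new keyframes are triggered slightly earlier.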
00293 
00294       // Publish debugging image
00295       if (overlapping_pub_.getNumSubscribers() > 0)
00296         publishOverlap(cloud_xyz, last_2_current, overlap);
00297 
00298       // Add a new keyframe when the overlap drops below the minimum threshold
00299       if (overlap < TRACKING_MIN_OVERLAP)
00300       {
00301         return addFrameToMap(range);
00302       }
00303     }
00304     return false;
00305   }
00306 
00307   bool Tracking::addFrameToMap(double range)
00308   {
00309     if (c_frame_.getLeftKp().size() > LC_MIN_INLIERS)
00310     {
00311       c_frame_.regionClustering();
00312 
00313       // Check if enough clusters
00314       vector< vector<int> > clusters = c_frame_.getClusters();
00315       if (clusters.size() > 0)
00316       {
00317         // Add to graph
00318         c_frame_.setId(frame_id_);
00319         f_pub_->publishClustering(c_frame_);
00320         graph_->addFrameToQueue(c_frame_);
00321 
00322         // Store previous frame
00323         p_frame_ = c_frame_;
00324 
00325         // Get the cloud
00326         PointCloudRGB::Ptr cloud(new PointCloudRGB);
00327         pcl::copyPointCloud(*c_frame_.getPointCloud(), *cloud);
00328 
00329         // Save cloud
00330         if (range > params_.min_range && range < params_.max_range)
00331         {
00332           string pointclouds_dir = WORKING_DIRECTORY + "pointclouds/";
00333           string pc_filename = pointclouds_dir + lexical_cast<string>(kf_id_) + ".pcd";
00334           pcl::io::savePCDFileBinary(pc_filename, *cloud);
00335           kf_id_++;
00336         }
00337 
00338         // Store minimum and maximum values of last pointcloud
00339         pcl::getMinMax3D(*cloud, last_min_pt_, last_max_pt_);
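              // These bounds are reused by needNewKeyFrame() as the crop box for the
              // overlap estimation against the next frames.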
00340 
00341         // Publish cloud
00342         if (pc_pub_.getNumSubscribers() > 0 && cloud->points.size() > 0)
00343         {
00344           // Publish
00345           string frame_id_str = Tools::convertTo5digits(frame_id_);
00346           sensor_msgs::PointCloud2 cloud_msg;
00347           pcl::toROSMsg(*cloud, cloud_msg);
00348           cloud_msg.header.frame_id = frame_id_str; // write the keyframe id to the frame id of the message ;)
00349           pc_pub_.publish(cloud_msg);
00350         }
00351 
00352         // Increase the frame id counter
00353         frame_id_++;
00354         return true;
00355       }
00356     }
00357 
00358     return false;
00359   }
00360 
00361   bool Tracking::refinePose(Frame query, Frame candidate, tf::Transform& out, int& num_inliers)
00362   {
00363     // Init
00364     out.setIdentity();
00365     num_inliers = LC_MIN_INLIERS;
00366 
00367     // Sanity check
00368     if (query.getLeftDesc().rows == 0 || candidate.getLeftDesc().rows == 0)
00369       return false;
00370 
00371     // Match current and previous left descriptors
00372     vector<cv::DMatch> matches;
00373     Tools::ratioMatching(query.getLeftDesc(), candidate.getLeftDesc(), 0.8, matches);
00374 
00375     if (matches.size() >= LC_MIN_INLIERS)
00376     {
00377       // Get the matched keypoints
00378       vector<cv::KeyPoint> query_kp_l = query.getLeftKp();
00379       vector<cv::KeyPoint> query_kp_r = query.getRightKp();
00380       vector<cv::Point3f> query_3d = query.getCameraPoints();
00381       vector<cv::KeyPoint> cand_kp_l = candidate.getLeftKp();
00382       vector<cv::KeyPoint> cand_kp_r = candidate.getRightKp();
00383       vector<cv::Point3f> cand_3d = candidate.getCameraPoints();
00384       vector<cv::Point2f> query_matched_kp_l, query_matched_kp_r;
00385       vector<cv::Point2f> cand_matched_kp_l, cand_matched_kp_r;
00386       vector<cv::Point3f> cand_matched_3d_points;
00387       for(uint i=0; i<matches.size(); i++)
00388       {
00389         // Query keypoints
00390         query_matched_kp_l.push_back(query_kp_l[matches[i].queryIdx].pt);
00391         query_matched_kp_r.push_back(query_kp_r[matches[i].queryIdx].pt);
00392 
00393         // Candidate keypoints
00394         cand_matched_kp_l.push_back(cand_kp_l[matches[i].trainIdx].pt);
00395         cand_matched_kp_r.push_back(cand_kp_r[matches[i].trainIdx].pt);
00396 
00397         // 3d points
00398         cand_matched_3d_points.push_back(cand_3d[matches[i].trainIdx]);
00399       }
00400 
00401       cv::Mat rvec = cv::Mat::zeros(3, 1, CV_64FC1);
00402       cv::Mat tvec = cv::Mat::zeros(3, 1, CV_64FC1);
00403       vector<int> inliers;
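            // RANSAC PnP between the candidate 3D points and the query left keypoints.
            // Assuming the OpenCV 2.x argument order: no distortion, no extrinsic guess,
            // 100 iterations, 1.3 px reprojection threshold, and early termination once
            // LC_MAX_INLIERS inliers are found.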
00404       cv::solvePnPRansac(cand_matched_3d_points, query_matched_kp_l, camera_matrix_,
00405                      cv::Mat(), rvec, tvec, false,
00406                      100, 1.3, LC_MAX_INLIERS, inliers);
00407 
00408       // Inliers threshold
00409       if (inliers.size() < LC_MIN_INLIERS)
00410       {
00411         return false;
00412       }
00413       else
00414       {
00415         // Build output transform
00416         out = Tools::buildTransformation(rvec, tvec);
00417 
00418         // Save the inliers
00419         num_inliers = inliers.size();
00420         return true;
00421       }
00422     }
00423     else
00424     {
00425       return false;
00426     }
00427   }
00428 
00429   PointCloudRGB::Ptr Tracking::filterCloud(PointCloudRGB::Ptr in_cloud)
00430   {
00431     // Remove NaNs
00432     vector<int> indices;
00433     PointCloudRGB::Ptr cloud(new PointCloudRGB);
00434     pcl::removeNaNFromPointCloud(*in_cloud, *cloud, indices);
00435 
00436     // Voxel grid filter (used as x-y surface extraction; note that the leaf size in z is larger than in x-y)
00437     pcl::ApproximateVoxelGrid<PointRGB> grid;
00438     grid.setLeafSize(0.08, 0.08, 0.1);
00439     grid.setDownsampleAllData(true);
00440     grid.setInputCloud(cloud);
00441     grid.filter(*cloud);
00442 
00443     return cloud;
00444   }
00445 
00446   void Tracking::publishOverlap(PointCloudXYZ::Ptr cloud, tf::Transform movement, float overlap)
00447   {
00448     int w = 512;
00449     int h = 384;
00450     cv::Mat img(h, w, CV_8UC3, cv::Scalar(0,0,0));
00451 
00452     // Draw last fixed frame bounding box
00453     cv::rectangle(img, cv::Point(3*w/8, 3*h/8), cv::Point(5*w/8, 5*h/8), cv::Scalar(255, 255, 255) );
00454 
00455     float w_scale = (w/4) / (last_max_pt_(0) - last_min_pt_(0));
00456     float h_scale = (h/4) / (last_max_pt_(1) - last_min_pt_(1));
00457 
00458     // Get boundaries and transform them
00459     PointXYZ min_pt, max_pt;
00460     pcl::getMinMax3D(*cloud, min_pt, max_pt);
00461     tf::Vector3 p1(min_pt.x, max_pt.y, 0.0);
00462     tf::Vector3 p2(max_pt.x, max_pt.y, 0.0);
00463     tf::Vector3 p3(max_pt.x, min_pt.y, 0.0);
00464     tf::Vector3 p4(min_pt.x, min_pt.y, 0.0);
00465     p1 = movement * p1;
00466     p2 = movement * p2;
00467     p3 = movement * p3;
00468     p4 = movement * p4;
00469 
00470     // Draw polygon
00471     cv::line(img, cv::Point(w/2 + p1.x()*w_scale, h/2 + p1.y()*h_scale), cv::Point(w/2 + p2.x()*w_scale, h/2 + p2.y()*h_scale), cv::Scalar(255,0,0));
00472     cv::line(img, cv::Point(w/2 + p2.x()*w_scale, h/2 + p2.y()*h_scale), cv::Point(w/2 + p3.x()*w_scale, h/2 + p3.y()*h_scale), cv::Scalar(255,0,0));
00473     cv::line(img, cv::Point(w/2 + p3.x()*w_scale, h/2 + p3.y()*h_scale), cv::Point(w/2 + p4.x()*w_scale, h/2 + p4.y()*h_scale), cv::Scalar(255,0,0));
00474     cv::line(img, cv::Point(w/2 + p4.x()*w_scale, h/2 + p4.y()*h_scale), cv::Point(w/2 + p1.x()*w_scale, h/2 + p1.y()*h_scale), cv::Scalar(255,0,0));
00475 
00476     // Insert text
00477     stringstream s;
00478     s << "Overlap: " << (int)overlap << "%";
00479     cv::putText(img, s.str(), cv::Point(30, h-20), cv::FONT_HERSHEY_PLAIN, 1.0, cv::Scalar(255, 255, 255), 2, 8);
00480 
00481     // Publish
00482     cv_bridge::CvImage ros_image;
00483     ros_image.image = img.clone();
00484     ros_image.header.stamp = ros::Time::now();
00485     ros_image.encoding = "bgr8";
00486     overlapping_pub_.publish(ros_image.toImageMsg());
00487   }
00488 
00489 } //namespace slam

