stereo_odometer.cpp
#include <ros/ros.h>
#include <sensor_msgs/image_encodings.h>
#include <image_geometry/stereo_camera_model.h>
#include <cv_bridge/cv_bridge.h>
#include <pcl_ros/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl_conversions/pcl_conversions.h> // for pcl_conversions::toPCL

#include <viso_stereo.h>

#include <viso2_ros/VisoInfo.h>

#include "stereo_processor.h"
#include "odometer_base.h"
#include "odometry_params.h"

// to remove after debugging
#include <opencv2/highgui/highgui.hpp>

namespace viso2_ros
{

// some arbitrary values (0.1 m^2 linear cov., 10 deg^2 angular cov.)
static const boost::array<double, 36> STANDARD_POSE_COVARIANCE =
{ { 0.1, 0, 0, 0, 0, 0,
    0, 0.1, 0, 0, 0, 0,
    0, 0, 0.1, 0, 0, 0,
    0, 0, 0, 0.17, 0, 0,
    0, 0, 0, 0, 0.17, 0,
    0, 0, 0, 0, 0, 0.17 } };
static const boost::array<double, 36> STANDARD_TWIST_COVARIANCE =
{ { 0.002, 0, 0, 0, 0, 0,
    0, 0.002, 0, 0, 0, 0,
    0, 0, 0.05, 0, 0, 0,
    0, 0, 0, 0.09, 0, 0,
    0, 0, 0, 0, 0.09, 0,
    0, 0, 0, 0, 0, 0.09 } };
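// large covariance reported while the odometer is lost (i.e. when process() fails)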
static const boost::array<double, 36> BAD_COVARIANCE =
{ { 9999, 0, 0, 0, 0, 0,
    0, 9999, 0, 0, 0, 0,
    0, 0, 9999, 0, 0, 0,
    0, 0, 0, 9999, 0, 0,
    0, 0, 0, 0, 9999, 0,
    0, 0, 0, 0, 0, 9999 } };


class StereoOdometer : public StereoProcessor, public OdometerBase
{

private:

  boost::shared_ptr<VisualOdometryStereo> visual_odometer_;
  VisualOdometryStereo::parameters visual_odometer_params_;

  ros::Publisher point_cloud_pub_;
  ros::Publisher info_pub_;

  bool got_lost_;

  // reference frame change method: 0, 1 or 2. 0 means always change. 1 and 2 are explained below.
  int ref_frame_change_method_;
  bool change_reference_frame_;
  double ref_frame_motion_threshold_; // method 1: keep the reference frame while the last motion is small
  int ref_frame_inlier_threshold_; // method 2: keep the reference frame while the number of inliers is high
  Matrix reference_motion_;
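  // Note: change_reference_frame_ == true is passed to libviso2's process()
  // as its 'replace' argument, i.e. the incoming image replaces the previous
  // one and the stored reference frame is kept; reference_motion_ then holds
  // the motion accumulated since that reference frame. The VisoInfo message
  // publishes the inverted flag (whether the reference frame actually changed).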

public:

  typedef pcl::PointCloud<pcl::PointXYZRGB> PointCloud;

  StereoOdometer(const std::string& transport) :
    StereoProcessor(transport), OdometerBase(),
    got_lost_(false), change_reference_frame_(false)
  {
    // Read local parameters
    ros::NodeHandle local_nh("~");
    odometry_params::loadParams(local_nh, visual_odometer_params_);

    local_nh.param("ref_frame_change_method", ref_frame_change_method_, 0);
    local_nh.param("ref_frame_motion_threshold", ref_frame_motion_threshold_, 5.0);
    local_nh.param("ref_frame_inlier_threshold", ref_frame_inlier_threshold_, 150);

    point_cloud_pub_ = local_nh.advertise<PointCloud>("point_cloud", 1);
    info_pub_ = local_nh.advertise<VisoInfo>("info", 1);

    reference_motion_ = Matrix::eye(4);
  }

protected:

  void initOdometer(
      const sensor_msgs::CameraInfoConstPtr& l_info_msg,
      const sensor_msgs::CameraInfoConstPtr& r_info_msg)
  {
    int queue_size;
    bool approximate_sync;
    ros::NodeHandle local_nh("~");
    local_nh.param("queue_size", queue_size, 5);
    local_nh.param("approximate_sync", approximate_sync, false);

    // read calibration info from camera info message
    // to fill remaining parameters
    image_geometry::StereoCameraModel model;
    model.fromCameraInfo(*l_info_msg, *r_info_msg);
    visual_odometer_params_.base      = model.baseline();
    visual_odometer_params_.calib.cu  = model.left().cx();
    visual_odometer_params_.calib.cv  = model.left().cy();
    visual_odometer_params_.calib.f   = model.left().fx();

    visual_odometer_.reset(new VisualOdometryStereo(visual_odometer_params_));
    if (l_info_msg->header.frame_id != "") setSensorFrameId(l_info_msg->header.frame_id);
    ROS_INFO_STREAM("Initialized libviso2 stereo odometry "
                    "with the following parameters:" << std::endl <<
                    visual_odometer_params_ <<
                    "  queue_size = " << queue_size << std::endl <<
                    "  approximate_sync = " << approximate_sync << std::endl <<
                    "  ref_frame_change_method = " << ref_frame_change_method_ << std::endl <<
                    "  ref_frame_motion_threshold = " << ref_frame_motion_threshold_ << std::endl <<
                    "  ref_frame_inlier_threshold = " << ref_frame_inlier_threshold_);
  }

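  // Callback for a synchronized stereo pair: converts both images to MONO8,
  // feeds them to libviso2, integrates and publishes the estimated motion,
  // and optionally publishes a point cloud of the inlier matches plus a
  // VisoInfo message with statistics for this iteration.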
  void imageCallback(
      const sensor_msgs::ImageConstPtr& l_image_msg,
      const sensor_msgs::ImageConstPtr& r_image_msg,
      const sensor_msgs::CameraInfoConstPtr& l_info_msg,
      const sensor_msgs::CameraInfoConstPtr& r_info_msg)
  {
    ros::WallTime start_time = ros::WallTime::now();
    bool first_run = false;
    // create the odometer if it does not exist yet
    if (!visual_odometer_)
    {
      first_run = true;
      initOdometer(l_info_msg, r_info_msg);
    }

    // convert images if necessary
    uint8_t *l_image_data, *r_image_data;
    int l_step, r_step;
    cv_bridge::CvImageConstPtr l_cv_ptr, r_cv_ptr;
    l_cv_ptr = cv_bridge::toCvShare(l_image_msg, sensor_msgs::image_encodings::MONO8);
    l_image_data = l_cv_ptr->image.data;
    l_step = l_cv_ptr->image.step[0];
    r_cv_ptr = cv_bridge::toCvShare(r_image_msg, sensor_msgs::image_encodings::MONO8);
    r_image_data = r_cv_ptr->image.data;
    r_step = r_cv_ptr->image.step[0];

    ROS_ASSERT(l_step == r_step);
    ROS_ASSERT(l_image_msg->width == r_image_msg->width);
    ROS_ASSERT(l_image_msg->height == r_image_msg->height);

    int32_t dims[] = {l_image_msg->width, l_image_msg->height, l_step};
    // on first run or when the odometer got lost, only feed the odometer with
    // images without retrieving data
    if (first_run || got_lost_)
    {
      visual_odometer_->process(l_image_data, r_image_data, dims);
      got_lost_ = false;
      // on first run publish zero once
      if (first_run)
      {
        tf::Transform delta_transform;
        delta_transform.setIdentity();
        integrateAndPublish(delta_transform, l_image_msg->header.stamp);
      }
    }
    else
    {
      bool success = visual_odometer_->process(
          l_image_data, r_image_data, dims, change_reference_frame_);
      if (success)
      {
        Matrix motion = Matrix::inv(visual_odometer_->getMotion());
        ROS_DEBUG("Found %i matches with %i inliers.",
                  visual_odometer_->getNumberOfMatches(),
                  visual_odometer_->getNumberOfInliers());
        ROS_DEBUG_STREAM("libviso2 returned the following motion:\n" << motion);
        Matrix camera_motion;
        // if the image was replaced due to small motion we have to subtract the
        // last motion to get the increment
        if (change_reference_frame_)
        {
          camera_motion = Matrix::inv(reference_motion_) * motion;
        }
        else
        {
          // image was not replaced, report full motion from odometer
          camera_motion = motion;
        }
        reference_motion_ = motion; // store last motion as reference

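        // convert the 4x4 homogeneous motion matrix from libviso2 into a
        // tf transform (rotation block and translation column)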
        tf::Matrix3x3 rot_mat(
          camera_motion.val[0][0], camera_motion.val[0][1], camera_motion.val[0][2],
          camera_motion.val[1][0], camera_motion.val[1][1], camera_motion.val[1][2],
          camera_motion.val[2][0], camera_motion.val[2][1], camera_motion.val[2][2]);
        tf::Vector3 t(camera_motion.val[0][3], camera_motion.val[1][3], camera_motion.val[2][3]);
        tf::Transform delta_transform(rot_mat, t);

        setPoseCovariance(STANDARD_POSE_COVARIANCE);
        setTwistCovariance(STANDARD_TWIST_COVARIANCE);

        integrateAndPublish(delta_transform, l_image_msg->header.stamp);

        if (point_cloud_pub_.getNumSubscribers() > 0)
        {
          computeAndPublishPointCloud(l_info_msg, l_image_msg, r_info_msg,
                                      visual_odometer_->getMatches(),
                                      visual_odometer_->getInlierIndices());
        }
      }
      else
      {
        setPoseCovariance(BAD_COVARIANCE);
        setTwistCovariance(BAD_COVARIANCE);
        tf::Transform delta_transform;
        delta_transform.setIdentity();
        integrateAndPublish(delta_transform, l_image_msg->header.stamp);

        ROS_DEBUG("Call to VisualOdometryStereo::process() failed.");
        ROS_WARN_THROTTLE(10.0, "Visual Odometer got lost!");
        got_lost_ = true;
      }

      if (success)
      {

        // Decide whether the next frame should reuse the current reference
        // frame: method 1 keeps it while the average feature flow stays below
        // ref_frame_motion_threshold_, method 2 keeps it while the number of
        // inliers stays above ref_frame_inlier_threshold_, and the default
        // (method 0) always advances the reference frame.
        switch ( ref_frame_change_method_ )
        {
          case 1:
          {
            // calculate current feature flow
            double feature_flow = computeFeatureFlow(visual_odometer_->getMatches());
            change_reference_frame_ = (feature_flow < ref_frame_motion_threshold_);
            ROS_DEBUG_STREAM("Feature flow is " << feature_flow
                << ", marking last motion as "
                << (change_reference_frame_ ? "small." : "normal."));
            break;
          }
          case 2:
          {
            change_reference_frame_ = (visual_odometer_->getNumberOfInliers() > ref_frame_inlier_threshold_);
            break;
          }
          default:
            change_reference_frame_ = false;
        }

      }
      else
        change_reference_frame_ = false;

      if (!change_reference_frame_)
        ROS_DEBUG_STREAM("Changing reference frame");

      // create and publish viso2 info msg
      VisoInfo info_msg;
      info_msg.header.stamp = l_image_msg->header.stamp;
      info_msg.got_lost = !success;
      info_msg.change_reference_frame = !change_reference_frame_;
      info_msg.num_matches = visual_odometer_->getNumberOfMatches();
      info_msg.num_inliers = visual_odometer_->getNumberOfInliers();
      ros::WallDuration time_elapsed = ros::WallTime::now() - start_time;
      info_msg.runtime = time_elapsed.toSec();
      info_pub_.publish(info_msg);
    }
  }

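  // Returns the average Euclidean displacement, in pixels, of the matched
  // features in the left image between the previous and the current frame.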
  double computeFeatureFlow(
      const std::vector<Matcher::p_match>& matches)
  {
    if (matches.empty()) return 0.0; // guard against division by zero
    double total_flow = 0.0;
    for (size_t i = 0; i < matches.size(); ++i)
    {
      double x_diff = matches[i].u1c - matches[i].u1p;
      double y_diff = matches[i].v1c - matches[i].v1p;
      total_flow += sqrt(x_diff * x_diff + y_diff * y_diff);
    }
    return total_flow / matches.size();
  }

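  // Projects the inlier matches of the current frame to 3D using the stereo
  // disparity (u1c - u2c) and publishes them as an RGB point cloud in the
  // sensor frame, colored from the left image.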
  void computeAndPublishPointCloud(
      const sensor_msgs::CameraInfoConstPtr& l_info_msg,
      const sensor_msgs::ImageConstPtr& l_image_msg,
      const sensor_msgs::CameraInfoConstPtr& r_info_msg,
      const std::vector<Matcher::p_match>& matches,
      const std::vector<int32_t>& inlier_indices)
  {
    try
    {
      cv_bridge::CvImageConstPtr cv_ptr;
      cv_ptr = cv_bridge::toCvShare(l_image_msg, sensor_msgs::image_encodings::RGB8);
      // read calibration info from camera info message
      image_geometry::StereoCameraModel model;
      model.fromCameraInfo(*l_info_msg, *r_info_msg);
      PointCloud::Ptr point_cloud(new PointCloud());
      point_cloud->header.frame_id = getSensorFrameId();
      point_cloud->header.stamp = pcl_conversions::toPCL(l_info_msg->header).stamp;
      point_cloud->width = 1;
      point_cloud->height = inlier_indices.size();
      point_cloud->points.resize(inlier_indices.size());

      for (size_t i = 0; i < inlier_indices.size(); ++i)
      {
        const Matcher::p_match& match = matches[inlier_indices[i]];
        cv::Point2d left_uv;
        left_uv.x = match.u1c;
        left_uv.y = match.v1c;
        cv::Point3d point;
        double disparity = match.u1c - match.u2c;
        model.projectDisparityTo3d(left_uv, disparity, point);
        point_cloud->points[i].x = point.x;
        point_cloud->points[i].y = point.y;
        point_cloud->points[i].z = point.z;
        cv::Vec3b color = cv_ptr->image.at<cv::Vec3b>(left_uv.y,left_uv.x);
        point_cloud->points[i].r = color[0];
        point_cloud->points[i].g = color[1];
        point_cloud->points[i].b = color[2];
      }
      ROS_DEBUG("Publishing point cloud with %zu points.", point_cloud->size());
      point_cloud_pub_.publish(point_cloud);
    }
    catch (cv_bridge::Exception& e)
    {
      ROS_ERROR("cv_bridge exception: %s", e.what());
    }
  }
};

} // end of namespace


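// Node entry point: warns about common topic remapping mistakes, reads the
// image transport type from the first command line argument (default "raw")
// and runs the stereo odometer node.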
int main(int argc, char **argv)
{
  ros::init(argc, argv, "stereo_odometer");
  if (ros::names::remap("stereo") == "stereo") {
    ROS_WARN("'stereo' has not been remapped! Example command-line usage:\n"
             "\t$ rosrun viso2_ros stereo_odometer stereo:=narrow_stereo image:=image_rect");
  }
  if (ros::names::remap("image").find("rect") == std::string::npos) {
    ROS_WARN("stereo_odometer needs rectified input images. The used image "
             "topic is '%s'. Are you sure the images are rectified?",
             ros::names::remap("image").c_str());
  }

  std::string transport = argc > 1 ? argv[1] : "raw";
  viso2_ros::StereoOdometer odometer(transport);

  ros::spin();
  return 0;
}

