#include <ros/ros.h>
#include <sensor_msgs/image_encodings.h>
#include <image_geometry/stereo_camera_model.h>
#include <cv_bridge/cv_bridge.h>
#include <pcl_ros/point_cloud.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/point_types.h>

#include <viso_stereo.h>

#include <viso2_ros/VisoInfo.h>

#include "stereo_processor.h"
#include "odometer_base.h"
#include "odometry_params.h"

#include <opencv2/highgui/highgui.hpp>

namespace viso2_ros
{

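// Fixed covariance values attached to the published odometry: rough defaults
// for pose and twist while tracking succeeds, and a very large covariance
// that is reported whenever the visual odometer loses track.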
static const boost::array<double, 36> STANDARD_POSE_COVARIANCE =
{ { 0.1, 0, 0, 0, 0, 0,
    0, 0.1, 0, 0, 0, 0,
    0, 0, 0.1, 0, 0, 0,
    0, 0, 0, 0.17, 0, 0,
    0, 0, 0, 0, 0.17, 0,
    0, 0, 0, 0, 0, 0.17 } };
static const boost::array<double, 36> STANDARD_TWIST_COVARIANCE =
{ { 0.05, 0, 0, 0, 0, 0,
    0, 0.05, 0, 0, 0, 0,
    0, 0, 0.05, 0, 0, 0,
    0, 0, 0, 0.09, 0, 0,
    0, 0, 0, 0, 0.09, 0,
    0, 0, 0, 0, 0, 0.09 } };
static const boost::array<double, 36> BAD_COVARIANCE =
{ { 9999, 0, 0, 0, 0, 0,
    0, 9999, 0, 0, 0, 0,
    0, 0, 9999, 0, 0, 0,
    0, 0, 0, 9999, 0, 0,
    0, 0, 0, 0, 9999, 0,
    0, 0, 0, 0, 0, 9999 } };

class StereoOdometer : public StereoProcessor, public OdometerBase
{

private:

  boost::shared_ptr<VisualOdometryStereo> visual_odometer_;
  VisualOdometryStereo::parameters visual_odometer_params_;

  ros::Publisher point_cloud_pub_;
  ros::Publisher info_pub_;

  bool got_lost_;

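  // Reference frame handling: depending on ref_frame_change_method_, the
  // previous image can be kept as the matching reference when the estimated
  // motion is small (method 1) or while enough inliers remain (method 2).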
  int ref_frame_change_method_;
  bool change_reference_frame_;       // true: keep the previous frame as the matching reference
  double ref_frame_motion_threshold_; // method 1: keep reference below this mean feature flow [px]
  int ref_frame_inlier_threshold_;    // method 2: keep reference above this inlier count
  Matrix reference_motion_;           // last motion w.r.t. the kept reference frame

public:

  typedef pcl::PointCloud<pcl::PointXYZRGB> PointCloud;

  StereoOdometer(const std::string& transport) :
    StereoProcessor(transport), OdometerBase(),
    got_lost_(false), change_reference_frame_(false)
  {
    // Read the libviso2 and reference-frame parameters from the private namespace.
    ros::NodeHandle local_nh("~");
    odometry_params::loadParams(local_nh, visual_odometer_params_);

    local_nh.param("ref_frame_change_method", ref_frame_change_method_, 0);
    local_nh.param("ref_frame_motion_threshold", ref_frame_motion_threshold_, 5.0);
    local_nh.param("ref_frame_inlier_threshold", ref_frame_inlier_threshold_, 150);

    point_cloud_pub_ = local_nh.advertise<PointCloud>("point_cloud", 1);
    info_pub_ = local_nh.advertise<VisoInfo>("info", 1);

    reference_motion_ = Matrix::eye(4);
  }

protected:

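  // Called lazily from the first image callback: create the odometer once the
  // camera calibration is known.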
  void initOdometer(
      const sensor_msgs::CameraInfoConstPtr& l_info_msg,
      const sensor_msgs::CameraInfoConstPtr& r_info_msg)
  {
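    // Get the calibration from the CameraInfo pair: libviso2 needs the stereo
    // baseline plus the focal length and principal point of the rectified
    // left camera.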
    image_geometry::StereoCameraModel model;
    model.fromCameraInfo(*l_info_msg, *r_info_msg);
    visual_odometer_params_.base = model.baseline();
    visual_odometer_params_.calib.f = model.left().fx();
    visual_odometer_params_.calib.cu = model.left().cx();
    visual_odometer_params_.calib.cv = model.left().cy();
    visual_odometer_.reset(new VisualOdometryStereo(visual_odometer_params_));
    if (!l_info_msg->header.frame_id.empty())
      setSensorFrameId(l_info_msg->header.frame_id);
    ROS_INFO_STREAM("Initialized libviso2 stereo odometry "
                    "with the following parameters:" << std::endl <<
                    visual_odometer_params_ <<
                    "  ref_frame_change_method = " << ref_frame_change_method_ << std::endl <<
                    "  ref_frame_motion_threshold = " << ref_frame_motion_threshold_ << std::endl <<
                    "  ref_frame_inlier_threshold = " << ref_frame_inlier_threshold_);
  }

  void imageCallback(
      const sensor_msgs::ImageConstPtr& l_image_msg,
      const sensor_msgs::ImageConstPtr& r_image_msg,
      const sensor_msgs::CameraInfoConstPtr& l_info_msg,
      const sensor_msgs::CameraInfoConstPtr& r_info_msg)
  {
    ros::WallTime start_time = ros::WallTime::now();
    bool first_run = false;

    if (!visual_odometer_)
    {
      first_run = true;
      initOdometer(l_info_msg, r_info_msg);
    }

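    // Convert both images to MONO8; toCvShare avoids a copy when the incoming
    // encoding already matches.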
    uint8_t *l_image_data, *r_image_data;
    int l_step, r_step;
    cv_bridge::CvImageConstPtr l_cv_ptr, r_cv_ptr;
    l_cv_ptr = cv_bridge::toCvShare(l_image_msg, sensor_msgs::image_encodings::MONO8);
    l_image_data = l_cv_ptr->image.data;
    l_step = l_cv_ptr->image.step[0];
    r_cv_ptr = cv_bridge::toCvShare(r_image_msg, sensor_msgs::image_encodings::MONO8);
    r_image_data = r_cv_ptr->image.data;
    r_step = r_cv_ptr->image.step[0];

    ROS_ASSERT(l_step == r_step);
    ROS_ASSERT(l_image_msg->width == r_image_msg->width);
    ROS_ASSERT(l_image_msg->height == r_image_msg->height);

    int32_t dims[] = {static_cast<int32_t>(l_image_msg->width),
                      static_cast<int32_t>(l_image_msg->height),
                      l_step};
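    // On the first frame, or right after losing track, run one plain
    // processing step to (re)initialize the matcher; only the very first
    // frame publishes an identity increment.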
    if (first_run || got_lost_)
    {
      visual_odometer_->process(l_image_data, r_image_data, dims);
      got_lost_ = false;

      if (first_run)
      {
        tf::Transform delta_transform;
        delta_transform.setIdentity();
        integrateAndPublish(delta_transform, l_image_msg->header.stamp);
      }
    }
    else
    {
      bool success = visual_odometer_->process(
          l_image_data, r_image_data, dims, change_reference_frame_);
      if (success)
      {
        Matrix motion = Matrix::inv(visual_odometer_->getMotion());
        ROS_DEBUG("Found %i matches with %i inliers.",
                  visual_odometer_->getNumberOfMatches(),
                  visual_odometer_->getNumberOfInliers());
        ROS_DEBUG_STREAM("libviso2 returned the following motion:\n" << motion);
        Matrix camera_motion;

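        // libviso2 reports motion relative to the reference frame. If the
        // previous image was kept as reference, subtract the motion stored in
        // the last iteration to obtain the increment since the last frame.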
        if (change_reference_frame_)
        {
          camera_motion = Matrix::inv(reference_motion_) * motion;
        }
        else
        {
          camera_motion = motion;
        }
        reference_motion_ = motion;

        tf::Matrix3x3 rot_mat(
            camera_motion.val[0][0], camera_motion.val[0][1], camera_motion.val[0][2],
            camera_motion.val[1][0], camera_motion.val[1][1], camera_motion.val[1][2],
            camera_motion.val[2][0], camera_motion.val[2][1], camera_motion.val[2][2]);
        tf::Vector3 t(camera_motion.val[0][3], camera_motion.val[1][3], camera_motion.val[2][3]);
        tf::Transform delta_transform(rot_mat, t);

        setPoseCovariance(STANDARD_POSE_COVARIANCE);
        setTwistCovariance(STANDARD_TWIST_COVARIANCE);

        integrateAndPublish(delta_transform, l_image_msg->header.stamp);

        if (point_cloud_pub_.getNumSubscribers() > 0)
        {
          computeAndPublishPointCloud(l_info_msg, l_image_msg, r_info_msg,
                                      visual_odometer_->getMatches(),
                                      visual_odometer_->getInlierIndices());
        }
      }
      else
      {
        setPoseCovariance(BAD_COVARIANCE);
        setTwistCovariance(BAD_COVARIANCE);
        tf::Transform delta_transform;
        delta_transform.setIdentity();
        integrateAndPublish(delta_transform, l_image_msg->header.stamp);

        ROS_DEBUG("Call to VisualOdometryStereo::process() failed.");
        ROS_WARN_THROTTLE(1.0, "Visual Odometer got lost!");
        got_lost_ = true;
      }

      if (success)
      {
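        // Decide whether the next call should keep the current reference
        // frame (replace = true) instead of advancing it.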
        switch (ref_frame_change_method_)
        {
          case 1:
          {
            // Method 1: keep the reference while the mean feature flow stays
            // below ref_frame_motion_threshold_.
            double feature_flow = computeFeatureFlow(visual_odometer_->getMatches());
            change_reference_frame_ = (feature_flow < ref_frame_motion_threshold_);
            ROS_DEBUG_STREAM("Feature flow is " << feature_flow
                << ", marking last motion as "
                << (change_reference_frame_ ? "small." : "normal."));
            break;
          }
          case 2:
          {
            // Method 2: keep the reference while the inlier count stays above
            // ref_frame_inlier_threshold_.
            change_reference_frame_ = (visual_odometer_->getNumberOfInliers() > ref_frame_inlier_threshold_);
            break;
          }
          default:
            change_reference_frame_ = false;
        }
      }
      else
      {
        change_reference_frame_ = false;
      }

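      // Note the inverted meaning: change_reference_frame_ == true keeps the
      // previous image as reference, so the reference actually changes when
      // the flag is false.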
      if (!change_reference_frame_)
        ROS_DEBUG_STREAM("Changing reference frame");

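      // Publish per-frame diagnostics (match/inlier counts, runtime, status).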
      VisoInfo info_msg;
      info_msg.header.stamp = l_image_msg->header.stamp;
      info_msg.got_lost = !success;
      info_msg.change_reference_frame = !change_reference_frame_;
      info_msg.num_matches = visual_odometer_->getNumberOfMatches();
      info_msg.num_inliers = visual_odometer_->getNumberOfInliers();
      ros::WallDuration time_elapsed = ros::WallTime::now() - start_time;
      info_msg.runtime = time_elapsed.toSec();
      info_pub_.publish(info_msg);
    }
  }

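  // Mean 2D displacement (in pixels) of the matched features in the left
  // image; used by reference-frame method 1 as a proxy for scene motion.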
  double computeFeatureFlow(
      const std::vector<Matcher::p_match>& matches)
  {
    if (matches.empty()) return 0.0; // avoid division by zero below
    double total_flow = 0.0;
    for (size_t i = 0; i < matches.size(); ++i)
    {
      double x_diff = matches[i].u1c - matches[i].u1p;
      double y_diff = matches[i].v1c - matches[i].v1p;
      total_flow += sqrt(x_diff * x_diff + y_diff * y_diff);
    }
    return total_flow / matches.size();
  }

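  // Triangulate the inlier matches and publish them as a colored point cloud
  // expressed in the left camera (sensor) frame.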
  void computeAndPublishPointCloud(
      const sensor_msgs::CameraInfoConstPtr& l_info_msg,
      const sensor_msgs::ImageConstPtr& l_image_msg,
      const sensor_msgs::CameraInfoConstPtr& r_info_msg,
      const std::vector<Matcher::p_match>& matches,
      const std::vector<int32_t>& inlier_indices)
  {
    try
    {
      cv_bridge::CvImageConstPtr cv_ptr;
      cv_ptr = cv_bridge::toCvShare(l_image_msg, sensor_msgs::image_encodings::RGB8);

      image_geometry::StereoCameraModel model;
      model.fromCameraInfo(*l_info_msg, *r_info_msg);
      PointCloud::Ptr point_cloud(new PointCloud());
      point_cloud->header.frame_id = getSensorFrameId();
      point_cloud->header.stamp = pcl_conversions::toPCL(l_info_msg->header).stamp;
      point_cloud->width = 1;
      point_cloud->height = inlier_indices.size();
      point_cloud->points.resize(inlier_indices.size());

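      // Project every inlier to 3D from the disparity between the current
      // left and right feature positions, and color it from the RGB image.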
      for (size_t i = 0; i < inlier_indices.size(); ++i)
      {
        const Matcher::p_match& match = matches[inlier_indices[i]];
        cv::Point2d left_uv;
        left_uv.x = match.u1c;
        left_uv.y = match.v1c;
        cv::Point3d point;
        double disparity = match.u1c - match.u2c;
        model.projectDisparityTo3d(left_uv, disparity, point);
        point_cloud->points[i].x = point.x;
        point_cloud->points[i].y = point.y;
        point_cloud->points[i].z = point.z;
        cv::Vec3b color = cv_ptr->image.at<cv::Vec3b>(left_uv.y, left_uv.x);
        point_cloud->points[i].r = color[0];
        point_cloud->points[i].g = color[1];
        point_cloud->points[i].b = color[2];
      }
      ROS_DEBUG("Publishing point cloud with %zu points.", point_cloud->size());
      point_cloud_pub_.publish(point_cloud);
    }
    catch (cv_bridge::Exception& e)
    {
      ROS_ERROR("cv_bridge exception: %s", e.what());
    }
  }
};

} // namespace viso2_ros

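// Node entry point: warn about the most common remapping mistakes, then hand
// control to ros::spin() while the odometer processes the image callbacks.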
int main(int argc, char **argv)
{
  ros::init(argc, argv, "stereo_odometer");
  if (ros::names::remap("stereo") == "stereo") {
    ROS_WARN("'stereo' has not been remapped! Example command-line usage:\n"
             "\t$ rosrun viso2_ros stereo_odometer stereo:=narrow_stereo image:=image_rect");
  }
  if (ros::names::remap("image").find("rect") == std::string::npos) {
    ROS_WARN("stereo_odometer needs rectified input images. The used image "
             "topic is '%s'. Are you sure the images are rectified?",
             ros::names::remap("image").c_str());
  }

  std::string transport = argc > 1 ? argv[1] : "raw";
  viso2_ros::StereoOdometer odometer(transport);

  ros::spin();
  return 0;
}