#include "OdometryROS.h"

#include <pluginlib/class_list_macros.h>
#include <nodelet/nodelet.h>

#include <message_filters/subscriber.h>
#include <message_filters/time_synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <message_filters/sync_policies/exact_time.h>

#include <image_transport/image_transport.h>
#include <image_transport/subscriber_filter.h>

#include <image_geometry/stereo_camera_model.h>

#include <sensor_msgs/Image.h>
#include <sensor_msgs/image_encodings.h>
#include <cv_bridge/cv_bridge.h>

#include "rtabmap_ros/MsgConversion.h"

#include <rtabmap/core/util3d.h>
#include <rtabmap/core/util2d.h>
#include <rtabmap/utilite/ULogger.h>
#include <rtabmap/utilite/UConversion.h>
using namespace rtabmap;

namespace rtabmap_ros
{

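// RGB-D visual odometry nodelet. It subscribes to synchronized RGB, depth and
// camera_info topics (for one or two RGB-D cameras) and forwards the assembled
// SensorData to OdometryROS::processData() for pose estimation.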
class RGBDOdometry : public rtabmap_ros::OdometryROS
{
public:
    RGBDOdometry() :
        OdometryROS(false),
        approxSync_(0),
        exactSync_(0),
        sync2_(0),
        queueSize_(5)
    {
    }

    virtual ~RGBDOdometry()
    {
        if(approxSync_)
        {
            delete approxSync_;
        }
        if(exactSync_)
        {
            delete exactSync_;
        }
        if(sync2_)
        {
            delete sync2_;
        }
    }

private:

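    // Reads the node parameters (approx_sync, queue_size, depth_cameras) and wires up
    // the image/camera_info subscribers. With two cameras a single 6-topic
    // approximate-time synchronizer is used; with one camera, either an approximate-
    // or exact-time 3-topic synchronizer is created depending on approx_sync.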
    virtual void onOdomInit()
    {
        ros::NodeHandle & nh = getNodeHandle();
        ros::NodeHandle & pnh = getPrivateNodeHandle();

        int depthCameras = 1;
        bool approxSync = true;
        pnh.param("approx_sync", approxSync, approxSync);
        pnh.param("queue_size", queueSize_, queueSize_);
        pnh.param("depth_cameras", depthCameras, depthCameras);
        if(depthCameras <= 0)
        {
            depthCameras = 1;
        }
        if(depthCameras > 2)
        {
            NODELET_FATAL("Only 2 cameras maximum are supported for now.");
        }

        if(depthCameras == 2)
        {
            // first camera (rgb0/depth0)
            ros::NodeHandle rgb0_nh(nh, "rgb0");
            ros::NodeHandle depth0_nh(nh, "depth0");
            ros::NodeHandle rgb0_pnh(pnh, "rgb0");
            ros::NodeHandle depth0_pnh(pnh, "depth0");
            image_transport::ImageTransport rgb0_it(rgb0_nh);
            image_transport::ImageTransport depth0_it(depth0_nh);
            image_transport::TransportHints hintsRgb0("raw", ros::TransportHints(), rgb0_pnh);
            image_transport::TransportHints hintsDepth0("raw", ros::TransportHints(), depth0_pnh);

            image_mono_sub_.subscribe(rgb0_it, rgb0_nh.resolveName("image"), 1, hintsRgb0);
            image_depth_sub_.subscribe(depth0_it, depth0_nh.resolveName("image"), 1, hintsDepth0);
            info_sub_.subscribe(rgb0_nh, "camera_info", 1);

            // second camera (rgb1/depth1)
            ros::NodeHandle rgb1_nh(nh, "rgb1");
            ros::NodeHandle depth1_nh(nh, "depth1");
            ros::NodeHandle rgb1_pnh(pnh, "rgb1");
            ros::NodeHandle depth1_pnh(pnh, "depth1");
            image_transport::ImageTransport rgb1_it(rgb1_nh);
            image_transport::ImageTransport depth1_it(depth1_nh);
            image_transport::TransportHints hintsRgb1("raw", ros::TransportHints(), rgb1_pnh);
            image_transport::TransportHints hintsDepth1("raw", ros::TransportHints(), depth1_pnh);

            image_mono2_sub_.subscribe(rgb1_it, rgb1_nh.resolveName("image"), 1, hintsRgb1);
            image_depth2_sub_.subscribe(depth1_it, depth1_nh.resolveName("image"), 1, hintsDepth1);
            info2_sub_.subscribe(rgb1_nh, "camera_info", 1);

            NODELET_INFO("\n%s subscribed to:\n %s,\n %s,\n %s,\n %s,\n %s,\n %s",
                    ros::this_node::getName().c_str(),
                    image_mono_sub_.getTopic().c_str(),
                    image_depth_sub_.getTopic().c_str(),
                    info_sub_.getTopic().c_str(),
                    image_mono2_sub_.getTopic().c_str(),
                    image_depth2_sub_.getTopic().c_str(),
                    info2_sub_.getTopic().c_str());

            sync2_ = new message_filters::Synchronizer<MySync2Policy>(
                    MySync2Policy(queueSize_),
                    image_mono_sub_,
                    image_depth_sub_,
                    info_sub_,
                    image_mono2_sub_,
                    image_depth2_sub_,
                    info2_sub_);
            sync2_->registerCallback(boost::bind(&RGBDOdometry::callback2, this, _1, _2, _3, _4, _5, _6));
        }
        else
        {
            ros::NodeHandle rgb_nh(nh, "rgb");
            ros::NodeHandle depth_nh(nh, "depth");
            ros::NodeHandle rgb_pnh(pnh, "rgb");
            ros::NodeHandle depth_pnh(pnh, "depth");
            image_transport::ImageTransport rgb_it(rgb_nh);
            image_transport::ImageTransport depth_it(depth_nh);
            image_transport::TransportHints hintsRgb("raw", ros::TransportHints(), rgb_pnh);
            image_transport::TransportHints hintsDepth("raw", ros::TransportHints(), depth_pnh);

            image_mono_sub_.subscribe(rgb_it, rgb_nh.resolveName("image"), 1, hintsRgb);
            image_depth_sub_.subscribe(depth_it, depth_nh.resolveName("image"), 1, hintsDepth);
            info_sub_.subscribe(rgb_nh, "camera_info", 1);

            if(approxSync)
            {
                approxSync_ = new message_filters::Synchronizer<MyApproxSyncPolicy>(MyApproxSyncPolicy(queueSize_), image_mono_sub_, image_depth_sub_, info_sub_);
                approxSync_->registerCallback(boost::bind(&RGBDOdometry::callback, this, _1, _2, _3));
            }
            else
            {
                exactSync_ = new message_filters::Synchronizer<MyExactSyncPolicy>(MyExactSyncPolicy(queueSize_), image_mono_sub_, image_depth_sub_, info_sub_);
                exactSync_->registerCallback(boost::bind(&RGBDOdometry::callback, this, _1, _2, _3));
            }

            NODELET_INFO("\n%s subscribed to (%s sync):\n %s,\n %s,\n %s",
                    ros::this_node::getName().c_str(),
                    approxSync?"approx":"exact",
                    image_mono_sub_.getTopic().c_str(),
                    image_depth_sub_.getTopic().c_str(),
                    info_sub_.getTopic().c_str());
        }
    }

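    // Single-camera callback: checks the RGB and depth encodings, looks up the
    // transform from the base frame to the camera optical frame, then packs a
    // SensorData with one CameraModel and passes it to processData().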
    void callback(
            const sensor_msgs::ImageConstPtr& image,
            const sensor_msgs::ImageConstPtr& depth,
            const sensor_msgs::CameraInfoConstPtr& cameraInfo)
    {
        if(!this->isPaused())
        {
            if(!(image->encoding.compare(sensor_msgs::image_encodings::TYPE_8UC1) == 0 ||
                 image->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0 ||
                 image->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0 ||
                 image->encoding.compare(sensor_msgs::image_encodings::BGR8) == 0 ||
                 image->encoding.compare(sensor_msgs::image_encodings::RGB8) == 0) ||
               !(depth->encoding.compare(sensor_msgs::image_encodings::TYPE_16UC1) == 0 ||
                 depth->encoding.compare(sensor_msgs::image_encodings::TYPE_32FC1) == 0 ||
                 depth->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0))
            {
                NODELET_ERROR("Input type must be image=mono8,mono16,rgb8,bgr8 (mono8 "
                        "recommended) and image_depth=16UC1,32FC1,mono16. Types detected: %s %s",
                        image->encoding.c_str(), depth->encoding.c_str());
                return;
            }

            ros::Time stamp = image->header.stamp>depth->header.stamp?image->header.stamp:depth->header.stamp;

            Transform localTransform = getTransform(this->frameId(), image->header.frame_id, stamp);
            if(localTransform.isNull())
            {
                return;
            }

            if(image->data.size() && depth->data.size() && cameraInfo->K[4] != 0)
            {
                rtabmap::CameraModel rtabmapModel = rtabmap_ros::cameraModelFromROS(*cameraInfo, localTransform);
                cv_bridge::CvImagePtr ptrImage = cv_bridge::toCvCopy(image, image->encoding.compare(sensor_msgs::image_encodings::TYPE_8UC1)==0?"":"mono8");
                cv_bridge::CvImagePtr ptrDepth = cv_bridge::toCvCopy(depth);

                rtabmap::SensorData data(
                        ptrImage->image,
                        ptrDepth->image,
                        rtabmapModel,
                        0,
                        rtabmap_ros::timestampFromROS(stamp));

                this->processData(data, stamp);
            }
        }
    }

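    // Two-camera callback: all RGB images must have the same size and type (same for
    // depth). The images are concatenated side-by-side into a single mosaic, with one
    // CameraModel per camera, so the rest of the pipeline can treat the pair as a
    // single multi-camera frame.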
    void callback2(
            const sensor_msgs::ImageConstPtr& image,
            const sensor_msgs::ImageConstPtr& depth,
            const sensor_msgs::CameraInfoConstPtr& cameraInfo,
            const sensor_msgs::ImageConstPtr& image2,
            const sensor_msgs::ImageConstPtr& depth2,
            const sensor_msgs::CameraInfoConstPtr& cameraInfo2)
    {
        if(!this->isPaused())
        {
            std::vector<sensor_msgs::ImageConstPtr> imageMsgs;
            std::vector<sensor_msgs::ImageConstPtr> depthMsgs;
            std::vector<sensor_msgs::CameraInfoConstPtr> infoMsgs;
            imageMsgs.push_back(image);
            imageMsgs.push_back(image2);
            depthMsgs.push_back(depth);
            depthMsgs.push_back(depth2);
            infoMsgs.push_back(cameraInfo);
            infoMsgs.push_back(cameraInfo2);

            ros::Time higherStamp;
            int imageWidth = imageMsgs[0]->width;
            int imageHeight = imageMsgs[0]->height;
            int cameraCount = imageMsgs.size();
            cv::Mat rgb;
            cv::Mat depth;
            pcl::PointCloud<pcl::PointXYZ> scanCloud;
            std::vector<CameraModel> cameraModels;
            for(unsigned int i=0; i<imageMsgs.size(); ++i)
            {
                if(!(imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::TYPE_8UC1) == 0 ||
                     imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0 ||
                     imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0 ||
                     imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::BGR8) == 0 ||
                     imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::RGB8) == 0) ||
                   !(depthMsgs[i]->encoding.compare(sensor_msgs::image_encodings::TYPE_16UC1) == 0 ||
                     depthMsgs[i]->encoding.compare(sensor_msgs::image_encodings::TYPE_32FC1) == 0 ||
                     depthMsgs[i]->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0))
                {
                    NODELET_ERROR("Input type must be image=mono8,mono16,rgb8,bgr8 and image_depth=32FC1,16UC1,mono16");
                    return;
                }
                UASSERT_MSG(imageMsgs[i]->width == imageWidth && imageMsgs[i]->height == imageHeight,
                        uFormat("imageWidth=%d vs %d imageHeight=%d vs %d",
                                imageWidth,
                                imageMsgs[i]->width,
                                imageHeight,
                                imageMsgs[i]->height).c_str());
                UASSERT_MSG(depthMsgs[i]->width == imageWidth && depthMsgs[i]->height == imageHeight,
                        uFormat("imageWidth=%d vs %d imageHeight=%d vs %d",
                                imageWidth,
                                depthMsgs[i]->width,
                                imageHeight,
                                depthMsgs[i]->height).c_str());

                ros::Time stamp = imageMsgs[i]->header.stamp>depthMsgs[i]->header.stamp?imageMsgs[i]->header.stamp:depthMsgs[i]->header.stamp;

                if(i == 0)
                {
                    higherStamp = stamp;
                }
                else if(stamp > higherStamp)
                {
                    higherStamp = stamp;
                }

                Transform localTransform = getTransform(this->frameId(), imageMsgs[i]->header.frame_id, stamp);
                if(localTransform.isNull())
                {
                    return;
                }

                cv_bridge::CvImageConstPtr ptrImage;
                if(imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::TYPE_8UC1)==0)
                {
                    ptrImage = cv_bridge::toCvShare(imageMsgs[i]);
                }
                else if(imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0 ||
                        imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0)
                {
                    ptrImage = cv_bridge::toCvShare(imageMsgs[i], "mono8");
                }
                else
                {
                    ptrImage = cv_bridge::toCvShare(imageMsgs[i], "bgr8");
                }
                cv_bridge::CvImageConstPtr ptrDepth = cv_bridge::toCvShare(depthMsgs[i]);
                cv::Mat subDepth = ptrDepth->image;

                if(rgb.empty())
                {
                    rgb = cv::Mat(imageHeight, imageWidth*cameraCount, ptrImage->image.type());
                }
                if(depth.empty())
                {
                    depth = cv::Mat(imageHeight, imageWidth*cameraCount, subDepth.type());
                }

                if(ptrImage->image.type() == rgb.type())
                {
                    ptrImage->image.copyTo(cv::Mat(rgb, cv::Rect(i*imageWidth, 0, imageWidth, imageHeight)));
                }
                else
                {
                    NODELET_ERROR("Some RGB images are not the same type!");
                    return;
                }

                if(subDepth.type() == depth.type())
                {
                    subDepth.copyTo(cv::Mat(depth, cv::Rect(i*imageWidth, 0, imageWidth, imageHeight)));
                }
                else
                {
                    NODELET_ERROR("Some Depth images are not the same type!");
                    return;
                }

                cameraModels.push_back(rtabmap_ros::cameraModelFromROS(*infoMsgs[i], localTransform));
            }

            rtabmap::SensorData data(
                    rgb,
                    depth,
                    cameraModels,
                    0,
                    rtabmap_ros::timestampFromROS(higherStamp));

            this->processData(data, higherStamp);
        }
    }

protected:
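    // Recreating the synchronizers drops any messages still buffered in their queues
    // (used by OdometryROS to flush stale data, e.g. after an odometry reset).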
    virtual void flushCallbacks()
    {
        if(approxSync_)
        {
            delete approxSync_;
            approxSync_ = new message_filters::Synchronizer<MyApproxSyncPolicy>(MyApproxSyncPolicy(queueSize_), image_mono_sub_, image_depth_sub_, info_sub_);
            approxSync_->registerCallback(boost::bind(&RGBDOdometry::callback, this, _1, _2, _3));
        }
        if(exactSync_)
        {
            delete exactSync_;
            exactSync_ = new message_filters::Synchronizer<MyExactSyncPolicy>(MyExactSyncPolicy(queueSize_), image_mono_sub_, image_depth_sub_, info_sub_);
            exactSync_->registerCallback(boost::bind(&RGBDOdometry::callback, this, _1, _2, _3));
        }
        if(sync2_)
        {
            delete sync2_;
            sync2_ = new message_filters::Synchronizer<MySync2Policy>(
                    MySync2Policy(queueSize_),
                    image_mono_sub_,
                    image_depth_sub_,
                    info_sub_,
                    image_mono2_sub_,
                    image_depth2_sub_,
                    info2_sub_);
            sync2_->registerCallback(boost::bind(&RGBDOdometry::callback2, this, _1, _2, _3, _4, _5, _6));
        }
    }

private:
    image_transport::SubscriberFilter image_mono_sub_;
    image_transport::SubscriberFilter image_depth_sub_;
    message_filters::Subscriber<sensor_msgs::CameraInfo> info_sub_;
    image_transport::SubscriberFilter image_mono2_sub_;
    image_transport::SubscriberFilter image_depth2_sub_;
    message_filters::Subscriber<sensor_msgs::CameraInfo> info2_sub_;
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image, sensor_msgs::CameraInfo> MyApproxSyncPolicy;
    message_filters::Synchronizer<MyApproxSyncPolicy> * approxSync_;
    typedef message_filters::sync_policies::ExactTime<sensor_msgs::Image, sensor_msgs::Image, sensor_msgs::CameraInfo> MyExactSyncPolicy;
    message_filters::Synchronizer<MyExactSyncPolicy> * exactSync_;
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image, sensor_msgs::CameraInfo, sensor_msgs::Image, sensor_msgs::Image, sensor_msgs::CameraInfo> MySync2Policy;
    message_filters::Synchronizer<MySync2Policy> * sync2_;
    int queueSize_;
};

PLUGINLIB_EXPORT_CLASS(rtabmap_ros::RGBDOdometry, nodelet::Nodelet);

}