point_cloud_xyzrgb.cpp
/*
Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.
    * Neither the name of the Universite de Sherbrooke nor the
      names of its contributors may be used to endorse or promote products
      derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
27 
28 #include <ros/ros.h>
30 #include <nodelet/nodelet.h>
31 
32 #include <pcl/point_cloud.h>
33 #include <pcl/point_types.h>
35 
37 
38 #include <sensor_msgs/PointCloud2.h>
39 #include <sensor_msgs/Image.h>
41 #include <sensor_msgs/CameraInfo.h>
42 
43 #include <stereo_msgs/DisparityImage.h>
44 
47 
50 
54 
55 #include <cv_bridge/cv_bridge.h>
56 #include <opencv2/highgui/highgui.hpp>
57 
58 #include "rtabmap/core/util2d.h"
59 #include "rtabmap/core/util3d.h"
63 #include "rtabmap/utilite/UStl.h"
64 
namespace rtabmap_ros
{

class PointCloudXYZRGB : public nodelet::Nodelet
{
public:
    PointCloudXYZRGB() :
        maxDepth_(0.0),
        minDepth_(0.0),
        voxelSize_(0.0),
        decimation_(1),
        noiseFilterRadius_(0.0),
        noiseFilterMinNeighbors_(5),
        normalK_(0),
        normalRadius_(0.0),
        filterNaNs_(false),
        approxSyncDepth_(0),
        approxSyncDisparity_(0),
        approxSyncStereo_(0),
        exactSyncDepth_(0),
        exactSyncDisparity_(0),
        exactSyncStereo_(0)
    {}

    virtual ~PointCloudXYZRGB()
    {
        if(approxSyncDepth_)
            delete approxSyncDepth_;
        if(approxSyncDisparity_)
            delete approxSyncDisparity_;
        if(approxSyncStereo_)
            delete approxSyncStereo_;
        if(exactSyncDepth_)
            delete exactSyncDepth_;
        if(exactSyncDisparity_)
            delete exactSyncDisparity_;
        if(exactSyncStereo_)
            delete exactSyncStereo_;
    }

private:
    virtual void onInit()
    {
        ros::NodeHandle & nh = getNodeHandle();
        ros::NodeHandle & pnh = getPrivateNodeHandle();

        int queueSize = 10;
        bool approxSync = true;
        std::string roiStr;
        pnh.param("approx_sync", approxSync, approxSync);
        pnh.param("queue_size", queueSize, queueSize);
        pnh.param("max_depth", maxDepth_, maxDepth_);
        pnh.param("min_depth", minDepth_, minDepth_);
        pnh.param("voxel_size", voxelSize_, voxelSize_);
        pnh.param("decimation", decimation_, decimation_);
        pnh.param("noise_filter_radius", noiseFilterRadius_, noiseFilterRadius_);
        pnh.param("noise_filter_min_neighbors", noiseFilterMinNeighbors_, noiseFilterMinNeighbors_);
        pnh.param("normal_k", normalK_, normalK_);
        pnh.param("normal_radius", normalRadius_, normalRadius_);
        pnh.param("filter_nans", filterNaNs_, filterNaNs_);
        pnh.param("roi_ratios", roiStr, roiStr);

        // parse roi (region of interest)
        roiRatios_.resize(4, 0);
        if(!roiStr.empty())
        {
            std::list<std::string> strValues = uSplit(roiStr, ' ');
            if(strValues.size() != 4)
            {
                ROS_ERROR("The number of values must be 4 (\"roi_ratios\"=\"%s\")", roiStr.c_str());
            }
            else
            {
                std::vector<float> tmpValues(4);
                unsigned int i=0;
                for(std::list<std::string>::iterator jter = strValues.begin(); jter!=strValues.end(); ++jter)
                {
                    tmpValues[i] = uStr2Float(*jter);
                    ++i;
                }

                if(tmpValues[0] >= 0 && tmpValues[0] < 1 && tmpValues[0] < 1.0f-tmpValues[1] &&
                   tmpValues[1] >= 0 && tmpValues[1] < 1 && tmpValues[1] < 1.0f-tmpValues[0] &&
                   tmpValues[2] >= 0 && tmpValues[2] < 1 && tmpValues[2] < 1.0f-tmpValues[3] &&
                   tmpValues[3] >= 0 && tmpValues[3] < 1 && tmpValues[3] < 1.0f-tmpValues[2])
                {
                    roiRatios_ = tmpValues;
                }
                else
                {
                    ROS_ERROR("The roi ratios are not valid (\"roi_ratios\"=\"%s\")", roiStr.c_str());
                }
            }
        }

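        // Illustrative note (not in the original source): "roi_ratios" is parsed as
        // four ratios in [0,1) that crop the input images before reprojection, in the
        // order left/right/top/bottom (ratio [0] shifts cx by columns and ratio [2]
        // shifts cy by rows in the callbacks below). For example, assuming a 640x480
        // depth image and roi_ratios = "0.1 0.1 0.2 0.2", the cropped region would be
        // roughly x=64, width=512, y=96, height=288, with the camera principal point
        // shifted by the same left/top offsets.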
        // StereoBM parameters
        stereoBMParameters_ = rtabmap::Parameters::getDefaultParameters("StereoBM");
        for(rtabmap::ParametersMap::iterator iter=stereoBMParameters_.begin(); iter!=stereoBMParameters_.end(); ++iter)
        {
            std::string vStr;
            bool vBool;
            int vInt;
            double vDouble;
            if(pnh.getParam(iter->first, vStr))
            {
                NODELET_INFO("point_cloud_xyzrgb: Setting parameter \"%s\"=\"%s\"", iter->first.c_str(), vStr.c_str());
                iter->second = vStr;
            }
            else if(pnh.getParam(iter->first, vBool))
            {
                NODELET_INFO("point_cloud_xyzrgb: Setting parameter \"%s\"=\"%s\"", iter->first.c_str(), uBool2Str(vBool).c_str());
                iter->second = uBool2Str(vBool);
            }
            else if(pnh.getParam(iter->first, vDouble))
            {
                NODELET_INFO("point_cloud_xyzrgb: Setting parameter \"%s\"=\"%s\"", iter->first.c_str(), uNumber2Str(vDouble).c_str());
                iter->second = uNumber2Str(vDouble);
            }
            else if(pnh.getParam(iter->first, vInt))
            {
                NODELET_INFO("point_cloud_xyzrgb: Setting parameter \"%s\"=\"%s\"", iter->first.c_str(), uNumber2Str(vInt).c_str());
                iter->second = uNumber2Str(vInt);
            }
        }

        NODELET_INFO("Approximate time sync = %s", approxSync?"true":"false");

        cloudPub_ = nh.advertise<sensor_msgs::PointCloud2>("cloud", 1);

        rgbdImageSub_ = nh.subscribe("rgbd_image", 1, &PointCloudXYZRGB::rgbdImageCallback, this);

        if(approxSync)
        {
            approxSyncDepth_ = new message_filters::Synchronizer<MyApproxSyncDepthPolicy>(MyApproxSyncDepthPolicy(queueSize), imageSub_, imageDepthSub_, cameraInfoSub_);
            approxSyncDepth_->registerCallback(boost::bind(&PointCloudXYZRGB::depthCallback, this, _1, _2, _3));

            approxSyncDisparity_ = new message_filters::Synchronizer<MyApproxSyncDisparityPolicy>(MyApproxSyncDisparityPolicy(queueSize), imageLeft_, imageDisparitySub_, cameraInfoLeft_);
            approxSyncDisparity_->registerCallback(boost::bind(&PointCloudXYZRGB::disparityCallback, this, _1, _2, _3));

            approxSyncStereo_ = new message_filters::Synchronizer<MyApproxSyncStereoPolicy>(MyApproxSyncStereoPolicy(queueSize), imageLeft_, imageRight_, cameraInfoLeft_, cameraInfoRight_);
            approxSyncStereo_->registerCallback(boost::bind(&PointCloudXYZRGB::stereoCallback, this, _1, _2, _3, _4));
        }
        else
        {
            exactSyncDepth_ = new message_filters::Synchronizer<MyExactSyncDepthPolicy>(MyExactSyncDepthPolicy(queueSize), imageSub_, imageDepthSub_, cameraInfoSub_);
            exactSyncDepth_->registerCallback(boost::bind(&PointCloudXYZRGB::depthCallback, this, _1, _2, _3));

            exactSyncDisparity_ = new message_filters::Synchronizer<MyExactSyncDisparityPolicy>(MyExactSyncDisparityPolicy(queueSize), imageLeft_, imageDisparitySub_, cameraInfoLeft_);
            exactSyncDisparity_->registerCallback(boost::bind(&PointCloudXYZRGB::disparityCallback, this, _1, _2, _3));

            exactSyncStereo_ = new message_filters::Synchronizer<MyExactSyncStereoPolicy>(MyExactSyncStereoPolicy(queueSize), imageLeft_, imageRight_, cameraInfoLeft_, cameraInfoRight_);
            exactSyncStereo_->registerCallback(boost::bind(&PointCloudXYZRGB::stereoCallback, this, _1, _2, _3, _4));
        }

        ros::NodeHandle rgb_nh(nh, "rgb");
        ros::NodeHandle depth_nh(nh, "depth");
        ros::NodeHandle rgb_pnh(pnh, "rgb");
        ros::NodeHandle depth_pnh(pnh, "depth");
        image_transport::ImageTransport rgb_it(rgb_nh);
        image_transport::ImageTransport depth_it(depth_nh);
        image_transport::TransportHints hintsRgb("raw", ros::TransportHints(), rgb_pnh);
        image_transport::TransportHints hintsDepth("raw", ros::TransportHints(), depth_pnh);

        imageSub_.subscribe(rgb_it, rgb_nh.resolveName("image"), 1, hintsRgb);
        imageDepthSub_.subscribe(depth_it, depth_nh.resolveName("image"), 1, hintsDepth);
        cameraInfoSub_.subscribe(rgb_nh, "camera_info", 1);

        ros::NodeHandle left_nh(nh, "left");
        ros::NodeHandle right_nh(nh, "right");
        ros::NodeHandle left_pnh(pnh, "left");
        ros::NodeHandle right_pnh(pnh, "right");
        image_transport::ImageTransport left_it(left_nh);
        image_transport::ImageTransport right_it(right_nh);
        image_transport::TransportHints hintsLeft("raw", ros::TransportHints(), left_pnh);
        image_transport::TransportHints hintsRight("raw", ros::TransportHints(), right_pnh);

        imageDisparitySub_.subscribe(nh, "disparity", 1);

        imageLeft_.subscribe(left_it, left_nh.resolveName("image"), 1, hintsLeft);
        imageRight_.subscribe(right_it, right_nh.resolveName("image"), 1, hintsRight);
        cameraInfoLeft_.subscribe(left_nh, "camera_info", 1);
        cameraInfoRight_.subscribe(right_nh, "camera_info", 1);
    }
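
    // Summary comment (added for readability, not in the original source): the nodelet
    // accepts four input modes and publishes a single "cloud" (sensor_msgs/PointCloud2):
    //  - RGB-D:     rgb/image + depth/image + rgb/camera_info           -> depthCallback()
    //  - disparity: an image + stereo_msgs/DisparityImage + camera_info -> disparityCallback()
    //  - stereo:    left/image + right/image + both camera_info topics  -> stereoCallback()
    //  - rtabmap_ros/RGBDImage on the rgbd_image topic                  -> rgbdImageCallback()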
249 
251  const sensor_msgs::ImageConstPtr& image,
252  const sensor_msgs::ImageConstPtr& imageDepth,
253  const sensor_msgs::CameraInfoConstPtr& cameraInfo)
254  {
255  if(!(image->encoding.compare(sensor_msgs::image_encodings::TYPE_8UC1) ==0 ||
256  image->encoding.compare(sensor_msgs::image_encodings::MONO8) ==0 ||
257  image->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0 ||
258  image->encoding.compare(sensor_msgs::image_encodings::BGR8) == 0 ||
259  image->encoding.compare(sensor_msgs::image_encodings::RGB8) == 0 ||
260  image->encoding.compare(sensor_msgs::image_encodings::BGRA8) == 0 ||
261  image->encoding.compare(sensor_msgs::image_encodings::RGBA8) == 0 ||
262  image->encoding.compare(sensor_msgs::image_encodings::BAYER_GRBG8) == 0) ||
263  !(imageDepth->encoding.compare(sensor_msgs::image_encodings::TYPE_16UC1)==0 ||
264  imageDepth->encoding.compare(sensor_msgs::image_encodings::TYPE_32FC1)==0 ||
265  imageDepth->encoding.compare(sensor_msgs::image_encodings::MONO16)==0))
266  {
267  NODELET_ERROR("Input type must be image=mono8,mono16,rgb8,bgr8 and image_depth=32FC1,16UC1,mono16");
268  return;
269  }

        if(cloudPub_.getNumSubscribers())
        {
            ros::WallTime time = ros::WallTime::now();

            cv_bridge::CvImageConstPtr imagePtr;
            if(image->encoding.compare(sensor_msgs::image_encodings::TYPE_8UC1)==0)
            {
                imagePtr = cv_bridge::toCvShare(image);
            }
            else if(image->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0 ||
                    image->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0)
            {
                imagePtr = cv_bridge::toCvShare(image, "mono8");
            }
            else
            {
                imagePtr = cv_bridge::toCvShare(image, "bgr8");
            }

            cv_bridge::CvImageConstPtr imageDepthPtr = cv_bridge::toCvShare(imageDepth);

            image_geometry::PinholeCameraModel model;
            model.fromCameraInfo(*cameraInfo);

            ROS_ASSERT(imageDepthPtr->image.cols == imagePtr->image.cols);
            ROS_ASSERT(imageDepthPtr->image.rows == imagePtr->image.rows);

            pcl::PointCloud<pcl::PointXYZRGB>::Ptr pclCloud;
            cv::Rect roi = rtabmap::util2d::computeRoi(imageDepthPtr->image, roiRatios_);

            rtabmap::CameraModel m(
                    model.fx(),
                    model.fy(),
                    model.cx()-roiRatios_[0]*double(imageDepthPtr->image.cols),
                    model.cy()-roiRatios_[2]*double(imageDepthPtr->image.rows));
            pcl::IndicesPtr indices(new std::vector<int>);
            pclCloud = rtabmap::util3d::cloudFromDepthRGB(
                    cv::Mat(imagePtr->image, roi),
                    cv::Mat(imageDepthPtr->image, roi),
                    m,
                    decimation_,
                    maxDepth_,
                    minDepth_,
                    indices.get());

            processAndPublish(pclCloud, indices, imagePtr->header);

            NODELET_DEBUG("point_cloud_xyzrgb from RGB-D time = %f s", (ros::WallTime::now() - time).toSec());
        }
    }
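
    // Reference note (not in the original source): rtabmap::util3d::cloudFromDepthRGB
    // back-projects every (decimated) depth pixel (u,v) with depth d through the
    // pinhole model
    //   x = (u - cx) * d / fx,   y = (v - cy) * d / fy,   z = d
    // which is why cx/cy are shifted above when roi_ratios crops the images.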

    void disparityCallback(
            const sensor_msgs::ImageConstPtr& image,
            const stereo_msgs::DisparityImageConstPtr& imageDisparity,
            const sensor_msgs::CameraInfoConstPtr& cameraInfo)
    {
        cv_bridge::CvImageConstPtr imagePtr;
        if(image->encoding.compare(sensor_msgs::image_encodings::TYPE_8UC1)==0)
        {
            imagePtr = cv_bridge::toCvShare(image);
        }
        else if(image->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0 ||
                image->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0)
        {
            imagePtr = cv_bridge::toCvShare(image, "mono8");
        }
        else
        {
            imagePtr = cv_bridge::toCvShare(image, "bgr8");
        }

        if(imageDisparity->image.encoding.compare(sensor_msgs::image_encodings::TYPE_32FC1) !=0 &&
           imageDisparity->image.encoding.compare(sensor_msgs::image_encodings::TYPE_16SC1) !=0)
        {
            NODELET_ERROR("Input type must be disparity=32FC1 or 16SC1");
            return;
        }

        cv::Mat disparity;
        if(imageDisparity->image.encoding.compare(sensor_msgs::image_encodings::TYPE_32FC1) == 0)
        {
            disparity = cv::Mat(imageDisparity->image.height, imageDisparity->image.width, CV_32FC1, const_cast<uchar*>(imageDisparity->image.data.data()));
        }
        else
        {
            disparity = cv::Mat(imageDisparity->image.height, imageDisparity->image.width, CV_16SC1, const_cast<uchar*>(imageDisparity->image.data.data()));
        }

        if(cloudPub_.getNumSubscribers())
        {
            ros::WallTime time = ros::WallTime::now();

            cv::Rect roi = rtabmap::util2d::computeRoi(disparity, roiRatios_);

            pcl::PointCloud<pcl::PointXYZRGB>::Ptr pclCloud;
            rtabmap::CameraModel leftModel = rtabmap_ros::cameraModelFromROS(*cameraInfo);
            rtabmap::StereoCameraModel stereoModel(imageDisparity->f, imageDisparity->f, leftModel.cx()-roiRatios_[0]*double(disparity.cols), leftModel.cy()-roiRatios_[2]*double(disparity.rows), imageDisparity->T);
            pcl::IndicesPtr indices(new std::vector<int>);
            pclCloud = rtabmap::util3d::cloudFromDisparityRGB(
                    cv::Mat(imagePtr->image, roi),
                    cv::Mat(disparity, roi),
                    stereoModel,
                    decimation_,
                    maxDepth_,
                    minDepth_,
                    indices.get());

            processAndPublish(pclCloud, indices, imageDisparity->header);

            NODELET_DEBUG("point_cloud_xyzrgb from disparity time = %f s", (ros::WallTime::now() - time).toSec());
        }
    }
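
    // Reference note (not in the original source): with the stereo model above,
    // depth is recovered from disparity as z = f * T / d, where f is the focal
    // length (pixels) and T the baseline taken from the DisparityImage message;
    // the ROI-shifted principal point then only affects the x/y back-projection.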

    void stereoCallback(const sensor_msgs::ImageConstPtr& imageLeft,
            const sensor_msgs::ImageConstPtr& imageRight,
            const sensor_msgs::CameraInfoConstPtr& camInfoLeft,
            const sensor_msgs::CameraInfoConstPtr& camInfoRight)
    {
        if(!(imageLeft->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0 ||
             imageLeft->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0 ||
             imageLeft->encoding.compare(sensor_msgs::image_encodings::BGR8) == 0 ||
             imageLeft->encoding.compare(sensor_msgs::image_encodings::RGB8) == 0) ||
           !(imageRight->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0 ||
             imageRight->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0 ||
             imageRight->encoding.compare(sensor_msgs::image_encodings::BGR8) == 0 ||
             imageRight->encoding.compare(sensor_msgs::image_encodings::RGB8) == 0))
        {
            NODELET_ERROR("Input type must be image=mono8,mono16,rgb8,bgr8 (enc=%s)", imageLeft->encoding.c_str());
            return;
        }

        if(cloudPub_.getNumSubscribers())
        {
            ros::WallTime time = ros::WallTime::now();

            cv_bridge::CvImageConstPtr ptrLeftImage, ptrRightImage;
            if(imageLeft->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0 ||
               imageLeft->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0)
            {
                ptrLeftImage = cv_bridge::toCvShare(imageLeft, "mono8");
            }
            else
            {
                ptrLeftImage = cv_bridge::toCvShare(imageLeft, "bgr8");
            }
            ptrRightImage = cv_bridge::toCvShare(imageRight, "mono8");

            if(roiRatios_[0]!=0.0f || roiRatios_[1]!=0.0f || roiRatios_[2]!=0.0f || roiRatios_[3]!=0.0f)
            {
                ROS_WARN("\"roi_ratios\" set but ignored for stereo images.");
            }

            pcl::PointCloud<pcl::PointXYZRGB>::Ptr pclCloud;
            pcl::IndicesPtr indices(new std::vector<int>);
            pclCloud = rtabmap::util3d::cloudFromStereoImages(
                    ptrLeftImage->image,
                    ptrRightImage->image,
                    rtabmap_ros::stereoCameraModelFromROS(*camInfoLeft, *camInfoRight),
                    decimation_,
                    maxDepth_,
                    minDepth_,
                    indices.get(),
                    stereoBMParameters_);

            processAndPublish(pclCloud, indices, imageLeft->header);

            NODELET_DEBUG("point_cloud_xyzrgb from stereo time = %f s", (ros::WallTime::now() - time).toSec());
        }
    }
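
    // Reference note (not in the original source): for the stereo input, the
    // disparity between the left and right images is computed internally by
    // cloudFromStereoImages, using the "StereoBM" block-matching parameters
    // collected in onInit() and forwarded here as stereoBMParameters_.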

    void rgbdImageCallback(const rtabmap_ros::RGBDImageConstPtr & image)
    {
        if(cloudPub_.getNumSubscribers())
        {
            ros::WallTime time = ros::WallTime::now();

            rtabmap::SensorData data = rtabmap_ros::rgbdImageFromROS(image);
            pcl::PointCloud<pcl::PointXYZRGB>::Ptr pclCloud;
            pcl::IndicesPtr indices(new std::vector<int>);
            if(data.isValid())
            {
                pclCloud = rtabmap::util3d::cloudRGBFromSensorData(
                        data,
                        decimation_,
                        maxDepth_,
                        minDepth_,
                        indices.get(),
                        stereoBMParameters_);

                processAndPublish(pclCloud, indices, image->header);
            }

            NODELET_DEBUG("point_cloud_xyzrgb from rgbd_image time = %f s", (ros::WallTime::now() - time).toSec());
        }
    }

    void processAndPublish(pcl::PointCloud<pcl::PointXYZRGB>::Ptr & pclCloud, pcl::IndicesPtr & indices, const std_msgs::Header & header)
    {
        if(indices->size() && voxelSize_ > 0.0)
        {
            pclCloud = rtabmap::util3d::voxelize(pclCloud, indices, voxelSize_);
        }

        // Do radius filtering after voxel filtering (a lot faster)
        if(pclCloud->size() && noiseFilterRadius_ > 0.0 && noiseFilterMinNeighbors_ > 0)
        {
            if(pclCloud->is_dense)
            {
                indices = rtabmap::util3d::radiusFiltering(pclCloud, noiseFilterRadius_, noiseFilterMinNeighbors_);
            }
            else
            {
                indices = rtabmap::util3d::radiusFiltering(pclCloud, indices, noiseFilterRadius_, noiseFilterMinNeighbors_);
            }
            pcl::PointCloud<pcl::PointXYZRGB>::Ptr tmp(new pcl::PointCloud<pcl::PointXYZRGB>);
            pcl::copyPointCloud(*pclCloud, *indices, *tmp);
            pclCloud = tmp;
        }

        sensor_msgs::PointCloud2 rosCloud;
        if(pclCloud->size() && (normalK_ > 0 || normalRadius_ > 0.0f))
        {
            // compute normals
            pcl::PointCloud<pcl::Normal>::Ptr normals = rtabmap::util3d::computeNormals(pclCloud, normalK_, normalRadius_);
            pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pclCloudNormal(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
            pcl::concatenateFields(*pclCloud, *normals, *pclCloudNormal);
            if(filterNaNs_)
            {
                pclCloudNormal = rtabmap::util3d::removeNaNNormalsFromPointCloud(pclCloudNormal);
            }
            pcl::toROSMsg(*pclCloudNormal, rosCloud);
        }
        else
        {
            if(filterNaNs_ && !pclCloud->is_dense)
            {
                pclCloud = rtabmap::util3d::removeNaNFromPointCloud(pclCloud);
            }
            pcl::toROSMsg(*pclCloud, rosCloud);
        }
        rosCloud.header.stamp = header.stamp;
        rosCloud.header.frame_id = header.frame_id;

        // publish the message
        cloudPub_.publish(rosCloud);
    }
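
    // Processing summary (added for readability, not in the original source):
    //   1. optional voxel grid downsampling (voxel_size > 0)
    //   2. optional radius outlier removal (noise_filter_radius > 0 and
    //      noise_filter_min_neighbors > 0), done after voxelization since the
    //      neighborhood search is much cheaper on the downsampled cloud
    //   3. optional normal estimation (normal_k or normal_radius > 0), which
    //      switches the published point type to pcl::PointXYZRGBNormal
    //   4. optional NaN removal (filter_nans) before publishing on "cloud"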

private:
    double maxDepth_;
    double minDepth_;
    double voxelSize_;
    int decimation_;
    double noiseFilterRadius_;
    int noiseFilterMinNeighbors_;
    int normalK_;
    double normalRadius_;
    bool filterNaNs_;
    std::vector<float> roiRatios_;
    rtabmap::ParametersMap stereoBMParameters_;

    ros::Publisher cloudPub_;
    ros::Subscriber rgbdImageSub_;

    image_transport::SubscriberFilter imageSub_;
    image_transport::SubscriberFilter imageDepthSub_;
    message_filters::Subscriber<sensor_msgs::CameraInfo> cameraInfoSub_;

    message_filters::Subscriber<stereo_msgs::DisparityImage> imageDisparitySub_;

    image_transport::SubscriberFilter imageLeft_;
    image_transport::SubscriberFilter imageRight_;
    message_filters::Subscriber<sensor_msgs::CameraInfo> cameraInfoLeft_;
    message_filters::Subscriber<sensor_msgs::CameraInfo> cameraInfoRight_;

    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image, sensor_msgs::CameraInfo> MyApproxSyncDepthPolicy;
    message_filters::Synchronizer<MyApproxSyncDepthPolicy> * approxSyncDepth_;
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, stereo_msgs::DisparityImage, sensor_msgs::CameraInfo> MyApproxSyncDisparityPolicy;
    message_filters::Synchronizer<MyApproxSyncDisparityPolicy> * approxSyncDisparity_;
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image, sensor_msgs::CameraInfo, sensor_msgs::CameraInfo> MyApproxSyncStereoPolicy;
    message_filters::Synchronizer<MyApproxSyncStereoPolicy> * approxSyncStereo_;
    typedef message_filters::sync_policies::ExactTime<sensor_msgs::Image, sensor_msgs::Image, sensor_msgs::CameraInfo> MyExactSyncDepthPolicy;
    message_filters::Synchronizer<MyExactSyncDepthPolicy> * exactSyncDepth_;
    typedef message_filters::sync_policies::ExactTime<sensor_msgs::Image, stereo_msgs::DisparityImage, sensor_msgs::CameraInfo> MyExactSyncDisparityPolicy;
    message_filters::Synchronizer<MyExactSyncDisparityPolicy> * exactSyncDisparity_;
    typedef message_filters::sync_policies::ExactTime<sensor_msgs::Image, sensor_msgs::Image, sensor_msgs::CameraInfo, sensor_msgs::CameraInfo> MyExactSyncStereoPolicy;
    message_filters::Synchronizer<MyExactSyncStereoPolicy> * exactSyncStereo_;
};

PLUGINLIB_EXPORT_CLASS(rtabmap_ros::PointCloudXYZRGB, nodelet::Nodelet);
}

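A minimal consumer sketch (not part of this file): assuming the nodelet runs with its default topic names, a separate node could convert the published "cloud" messages back into PCL clouds as below; the node name and callback are illustrative only.

#include <ros/ros.h>
#include <sensor_msgs/PointCloud2.h>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl_conversions/pcl_conversions.h>

// Hypothetical example callback: convert the ROS message published on "cloud"
// back into a pcl::PointCloud<pcl::PointXYZRGB> and report its size.
void cloudCallback(const sensor_msgs::PointCloud2ConstPtr & msg)
{
    pcl::PointCloud<pcl::PointXYZRGB> cloud;
    pcl::fromROSMsg(*msg, cloud);
    ROS_INFO("Received cloud with %zu points in frame %s",
             cloud.size(), msg->header.frame_id.c_str());
}

int main(int argc, char ** argv)
{
    ros::init(argc, argv, "cloud_listener");  // illustrative node name
    ros::NodeHandle nh;
    ros::Subscriber sub = nh.subscribe("cloud", 1, cloudCallback);
    ros::spin();
    return 0;
}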