MsgConversion.cpp
1 /*
2 Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
3 All rights reserved.
4 
5 Redistribution and use in source and binary forms, with or without
6 modification, are permitted provided that the following conditions are met:
7  * Redistributions of source code must retain the above copyright
8  notice, this list of conditions and the following disclaimer.
9  * Redistributions in binary form must reproduce the above copyright
10  notice, this list of conditions and the following disclaimer in the
11  documentation and/or other materials provided with the distribution.
12  * Neither the name of the Universite de Sherbrooke nor the
13  names of its contributors may be used to endorse or promote products
14  derived from this software without specific prior written permission.
15 
16 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17 ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
20 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
21 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
22 LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
23 ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */
27 
28 #include "rtabmap_ros/MsgConversion.h"
29 
30 #include <opencv2/highgui/highgui.hpp>
31 #include <zlib.h>
32 #include <ros/ros.h>
33 #include <rtabmap/core/util3d.h>
37 #include <rtabmap/utilite/UStl.h>
47 
48 namespace rtabmap_ros {
49 
50 void transformToTF(const rtabmap::Transform & transform, tf::Transform & tfTransform)
51 {
52  if(!transform.isNull())
53  {
54  tf::transformEigenToTF(transform.toEigen3d(), tfTransform);
55  }
56  else
57  {
58  tfTransform = tf::Transform();
59  }
60 }
61 
62 rtabmap::Transform transformFromTF(const tf::Transform & transform)
63 {
64  Eigen::Affine3d eigenTf;
65  tf::transformTFToEigen(transform, eigenTf);
66  return rtabmap::Transform::fromEigen3d(eigenTf);
67 }
68 
69 void transformToGeometryMsg(const rtabmap::Transform & transform, geometry_msgs::Transform & msg)
70 {
71  if(!transform.isNull())
72  {
73  tf::transformEigenToMsg(transform.toEigen3d(), msg);
74 
75  // make sure the quaternion is normalized
76  long double recipNorm = 1.0 / sqrt(msg.rotation.x * msg.rotation.x + msg.rotation.y * msg.rotation.y + msg.rotation.z * msg.rotation.z + msg.rotation.w * msg.rotation.w);
77  msg.rotation.x *= recipNorm;
78  msg.rotation.y *= recipNorm;
79  msg.rotation.z *= recipNorm;
80  msg.rotation.w *= recipNorm;
81  }
82  else
83  {
84  msg = geometry_msgs::Transform();
85  }
86 }
87 
88 
89 rtabmap::Transform transformFromGeometryMsg(const geometry_msgs::Transform & msg)
90 {
91  if(msg.rotation.w == 0 &&
92  msg.rotation.x == 0 &&
93  msg.rotation.y == 0 &&
94  msg.rotation.z ==0)
95  {
96  return rtabmap::Transform();
97  }
98 
99  Eigen::Affine3d tfTransform;
100  tf::transformMsgToEigen(msg, tfTransform);
101  return rtabmap::Transform::fromEigen3d(tfTransform);
102 }
103 
104 void transformToPoseMsg(const rtabmap::Transform & transform, geometry_msgs::Pose & msg)
105 {
106  if(!transform.isNull())
107  {
108  tf::poseEigenToMsg(transform.toEigen3d(), msg);
109  }
110  else
111  {
112  msg = geometry_msgs::Pose();
113  }
114 }
115 
116 rtabmap::Transform transformFromPoseMsg(const geometry_msgs::Pose & msg, bool ignoreRotationIfNotSet)
117 {
118  if(msg.orientation.w == 0 &&
119  msg.orientation.x == 0 &&
120  msg.orientation.y == 0 &&
121  msg.orientation.z == 0)
122  {
123  if(ignoreRotationIfNotSet)
124  {
125  return rtabmap::Transform(msg.position.x, msg.position.y, msg.position.z, 0, 0, 0);
126  }
127  return rtabmap::Transform();
128  }
129  Eigen::Affine3d tfPose;
130  tf::poseMsgToEigen(msg, tfPose);
131  return rtabmap::Transform::fromEigen3d(tfPose);
132 }
133 
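// --- Illustrative usage sketch (not part of the original MsgConversion.cpp) ---
// Round-tripping a geometry_msgs::Pose through rtabmap::Transform with the
// helpers above; the pose values are arbitrary example numbers.
//
//   geometry_msgs::Pose poseMsg;
//   poseMsg.position.x = 1.0;
//   poseMsg.orientation.w = 1.0;                       // identity rotation
//   rtabmap::Transform t = rtabmap_ros::transformFromPoseMsg(poseMsg);
//   geometry_msgs::Pose back;
//   rtabmap_ros::transformToPoseMsg(t, back);          // back matches poseMsg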
134 void toCvCopy(const rtabmap_ros::RGBDImage & image, cv_bridge::CvImagePtr & rgb, cv_bridge::CvImagePtr & depth)
135 {
136  if(!image.rgb.data.empty())
137  {
138  rgb = cv_bridge::toCvCopy(image.rgb);
139  }
140  else if(!image.rgb_compressed.data.empty())
141  {
142 #ifdef CV_BRIDGE_HYDRO
143  ROS_ERROR("Unsupported compressed image copy, please upgrade at least to ROS Indigo to use this.");
144 #else
145  rgb = cv_bridge::toCvCopy(image.rgb_compressed);
146 #endif
147  }
148 
149  if(!image.depth.data.empty())
150  {
151  depth = cv_bridge::toCvCopy(image.depth);
152  }
153  else if(!image.depth_compressed.data.empty())
154  {
155  cv_bridge::CvImagePtr ptr = boost::make_shared<cv_bridge::CvImage>();
156  ptr->header = image.depth_compressed.header;
157  ptr->image = rtabmap::uncompressImage(image.depth_compressed.data);
158  ROS_ASSERT(ptr->image.empty() || ptr->image.type() == CV_32FC1 || ptr->image.type() == CV_16UC1);
159  ptr->encoding = ptr->image.empty()?"":ptr->image.type() == CV_32FC1?sensor_msgs::image_encodings::TYPE_32FC1:sensor_msgs::image_encodings::TYPE_16UC1;
160  depth = ptr;
161  }
162 }
163 
164 void toCvShare(const rtabmap_ros::RGBDImageConstPtr & image, cv_bridge::CvImageConstPtr & rgb, cv_bridge::CvImageConstPtr & depth)
165 {
166  if(!image->rgb.data.empty())
167  {
168  rgb = cv_bridge::toCvShare(image->rgb, image);
169  }
170  else if(!image->rgb_compressed.data.empty())
171  {
172 #ifdef CV_BRIDGE_HYDRO
173  ROS_ERROR("Unsupported compressed image copy, please upgrade at least to ROS Indigo to use this.");
174 #else
175  rgb = cv_bridge::toCvCopy(image->rgb_compressed);
176 #endif
177  }
178 
179  if(!image->depth.data.empty())
180  {
181  depth = cv_bridge::toCvShare(image->depth, image);
182  }
183  else if(!image->depth_compressed.data.empty())
184  {
185  if(image->depth_compressed.format.compare("jpg")==0)
186  {
187 #ifdef CV_BRIDGE_HYDRO
188  ROS_ERROR("Unsupported compressed image copy, please upgrade at least to ROS Indigo to use this.");
189 #else
190  depth = cv_bridge::toCvCopy(image->depth_compressed);
191 #endif
192  }
193  else
194  {
195  cv_bridge::CvImagePtr ptr = boost::make_shared<cv_bridge::CvImage>();
196  ptr->header = image->depth_compressed.header;
197  ptr->image = rtabmap::uncompressImage(image->depth_compressed.data);
198  ROS_ASSERT(ptr->image.empty() || ptr->image.type() == CV_32FC1 || ptr->image.type() == CV_16UC1);
199  ptr->encoding = ptr->image.empty()?"":ptr->image.type() == CV_32FC1?sensor_msgs::image_encodings::TYPE_32FC1:sensor_msgs::image_encodings::TYPE_16UC1;
200  depth = ptr;
201  }
202  }
203 }
204 
205 void rgbdImageToROS(const rtabmap::SensorData & data, rtabmap_ros::RGBDImage & msg, const std::string & sensorFrameId)
206 {
207  std_msgs::Header header;
208  header.frame_id = sensorFrameId;
209  header.stamp = ros::Time(data.stamp());
210  rtabmap::Transform localTransform;
211  if(data.cameraModels().size()>1)
212  {
213  UERROR("Cannot convert multi-camera data to rgbd image");
214  return;
215  }
216  if(data.cameraModels().size() == 1)
217  {
218  //rgb+depth
219  rtabmap_ros::cameraModelToROS(data.cameraModels().front(), msg.rgb_camera_info);
220  msg.rgb_camera_info.header = header;
221  localTransform = data.cameraModels().front().localTransform();
222  }
223  else
224  {
225  //stereo
226  rtabmap_ros::cameraModelToROS(data.stereoCameraModel().left(), msg.rgb_camera_info);
227  rtabmap_ros::cameraModelToROS(data.stereoCameraModel().right(), msg.depth_camera_info);
228  msg.rgb_camera_info.header = header;
229  msg.depth_camera_info.header = header;
230  localTransform = data.stereoCameraModel().localTransform();
231  }
232 
233  if(!data.imageRaw().empty())
234  {
235  cv_bridge::CvImage cvImg;
236  cvImg.header = header;
237  cvImg.image = data.imageRaw();
238  UASSERT(data.imageRaw().type()==CV_8UC1 || data.imageRaw().type()==CV_8UC3);
239  cvImg.encoding = data.imageRaw().type()==CV_8UC1?sensor_msgs::image_encodings::MONO8:sensor_msgs::image_encodings::BGR8;
240  cvImg.toImageMsg(msg.rgb);
241  }
242  else if(!data.imageCompressed().empty())
243  {
244  ROS_ERROR("Conversion of compressed SensorData to RGBDImage is not implemented...");
245  }
246 
247  if(!data.depthOrRightRaw().empty())
248  {
249  cv_bridge::CvImage cvDepth;
250  cvDepth.header = header;
251  cvDepth.image = data.depthOrRightRaw();
252  UASSERT(data.depthOrRightRaw().type()==CV_8UC1 || data.depthOrRightRaw().type()==CV_16UC1 || data.depthOrRightRaw().type()==CV_32FC1);
253  cvDepth.encoding = data.depthOrRightRaw().type()==CV_8UC1?sensor_msgs::image_encodings::MONO8:data.depthOrRightRaw().type()==CV_16UC1?sensor_msgs::image_encodings::TYPE_16UC1:sensor_msgs::image_encodings::TYPE_32FC1;
254  cvDepth.toImageMsg(msg.depth);
255  }
256  else if(!data.depthOrRightCompressed().empty())
257  {
258  ROS_ERROR("Conversion of compressed SensorData to RGBDImage is not implemented...");
259  }
260 
261  //convert features
262  if(!data.keypoints().empty())
263  {
264  rtabmap_ros::keypointsToROS(data.keypoints(), msg.key_points);
265  }
266  if(!data.keypoints3D().empty())
267  {
268  rtabmap_ros::points3fToROS(data.keypoints3D(), msg.points, localTransform.inverse());
269  }
270  if(!data.descriptors().empty())
271  {
272  msg.descriptors = rtabmap::compressData(data.descriptors());
273  }
274  if(!data.globalDescriptors().empty())
275  {
276  rtabmap_ros::globalDescriptorToROS(data.globalDescriptors().front(), msg.global_descriptor);
277  msg.global_descriptor.header = header;
278  }
279 }
280 
281 rtabmap::SensorData rgbdImageFromROS(const rtabmap_ros::RGBDImageConstPtr & image)
282 {
283  rtabmap::SensorData data;
284  cv_bridge::CvImageConstPtr imageMsg;
285  cv_bridge::CvImageConstPtr depthMsg;
286  toCvShare(image, imageMsg, depthMsg);
287 
288  rtabmap::StereoCameraModel stereoModel = stereoCameraModelFromROS(image->rgb_camera_info, image->depth_camera_info, rtabmap::Transform::getIdentity());
289 
290  if(stereoModel.isValidForProjection())
291  {
292  cv_bridge::CvImageConstPtr imageRectLeft = imageMsg;
293  cv_bridge::CvImageConstPtr imageRectRight = depthMsg;
294  if(!(imageRectLeft->encoding.compare(sensor_msgs::image_encodings::MONO8) ==0 ||
295  imageRectLeft->encoding.compare(sensor_msgs::image_encodings::MONO16) ==0 ||
296  imageRectLeft->encoding.compare(sensor_msgs::image_encodings::BGR8) == 0 ||
297  imageRectLeft->encoding.compare(sensor_msgs::image_encodings::RGB8) == 0) ||
298  !(imageRectRight->encoding.compare(sensor_msgs::image_encodings::MONO8) ==0 ||
299  imageRectRight->encoding.compare(sensor_msgs::image_encodings::MONO16) ==0 ||
300  imageRectRight->encoding.compare(sensor_msgs::image_encodings::BGR8) == 0 ||
301  imageRectRight->encoding.compare(sensor_msgs::image_encodings::RGB8) == 0))
302  {
303  ROS_ERROR("Input type must be image=mono8,mono16,rgb8,bgr8 (mono8 recommended), received types are %s (left) and %s (right)",
304  imageRectLeft->encoding.c_str(), imageRectRight->encoding.c_str());
305  return data;
306  }
307 
308  if(!imageRectLeft->image.empty() && !imageRectRight->image.empty())
309  {
310  if(stereoModel.baseline() > 10.0)
311  {
312  static bool shown = false;
313  if(!shown)
314  {
315  ROS_WARN("Detected baseline (%f m) is quite large! Is your "
316  "right camera_info P(0,3) correctly set? Note that "
317  "baseline=-P(0,3)/P(0,0). This warning is printed only once.",
318  stereoModel.baseline());
319  shown = true;
320  }
321  }
322 
323  cv::Mat left, right;
324  if(imageRectLeft->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0 ||
325  imageRectLeft->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0)
326  {
327  left = cv_bridge::cvtColor(imageRectLeft, "mono8")->image;
328  }
329  else
330  {
331  left = cv_bridge::cvtColor(imageRectLeft, "bgr8")->image;
332  }
333  right = cv_bridge::cvtColor(imageRectRight, "mono8")->image;
334 
335  //
336 
337  data = rtabmap::SensorData(
338  left,
339  right,
340  stereoModel,
341  0,
342  rtabmap_ros::timestampFromROS(image->header.stamp));
343  }
344  else
345  {
346  ROS_WARN("Odom: input images empty?!?");
347  }
348  }
349  else //depth
350  {
351  ros::Time higherStamp;
352  int imageWidth = imageMsg->image.cols;
353  int imageHeight = imageMsg->image.rows;
354  int depthWidth = depthMsg->image.cols;
355  int depthHeight = depthMsg->image.rows;
356 
357  UASSERT_MSG(
358  imageWidth % depthWidth == 0 && imageHeight % depthHeight == 0 &&
359  imageWidth/depthWidth == imageHeight/depthHeight,
360  uFormat("rgb=%dx%d depth=%dx%d", imageWidth, imageHeight, depthWidth, depthHeight).c_str());
361 
362  cv::Mat rgb;
363  cv::Mat depth;
364  rtabmap::CameraModel cameraModels;
365 
366  if(!(imageMsg->encoding.compare(sensor_msgs::image_encodings::TYPE_8UC1) ==0 ||
367  imageMsg->encoding.compare(sensor_msgs::image_encodings::MONO8) ==0 ||
368  imageMsg->encoding.compare(sensor_msgs::image_encodings::MONO16) ==0 ||
369  imageMsg->encoding.compare(sensor_msgs::image_encodings::BGR8) == 0 ||
370  imageMsg->encoding.compare(sensor_msgs::image_encodings::RGB8) == 0 ||
371  imageMsg->encoding.compare(sensor_msgs::image_encodings::BGRA8) == 0 ||
372  imageMsg->encoding.compare(sensor_msgs::image_encodings::RGBA8) == 0 ||
373  imageMsg->encoding.compare(sensor_msgs::image_encodings::BAYER_GRBG8) == 0) ||
374  !(depthMsg->encoding.compare(sensor_msgs::image_encodings::TYPE_16UC1) == 0 ||
375  depthMsg->encoding.compare(sensor_msgs::image_encodings::TYPE_32FC1) == 0 ||
376  depthMsg->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0))
377  {
378  ROS_ERROR("Input type must be image=mono8,mono16,rgb8,bgr8,bgra8,rgba8 and "
379  "image_depth=32FC1,16UC1,mono16. Current rgb=%s and depth=%s",
380  imageMsg->encoding.c_str(),
381  depthMsg->encoding.c_str());
382  return data;
383  }
384 
385  cv_bridge::CvImageConstPtr ptrImage = imageMsg;
386  if(imageMsg->encoding.compare(sensor_msgs::image_encodings::TYPE_8UC1)==0 ||
387  imageMsg->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0 ||
388  imageMsg->encoding.compare(sensor_msgs::image_encodings::BGR8) == 0)
389  {
390  // do nothing
391  }
392  else if(imageMsg->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0)
393  {
394  ptrImage = cv_bridge::cvtColor(imageMsg, "mono8");
395  }
396  else
397  {
398  ptrImage = cv_bridge::cvtColor(imageMsg, "bgr8");
399  }
400 
401  cv_bridge::CvImageConstPtr ptrDepth = depthMsg;
402  data = rtabmap::SensorData(
403  ptrImage->image,
404  ptrDepth->image,
405  rtabmap_ros::cameraModelFromROS(image->rgb_camera_info),
406  0,
407  rtabmap_ros::timestampFromROS(image->header.stamp));
408  }
409 
410  return data;
411 }
412 
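// --- Illustrative usage sketch (not part of the original file) ---
// Feeding a received rtabmap_ros::RGBDImage message into rgbdImageFromROS().
// The callback name and topic are assumptions made for the example.
//
//   void rgbdCallback(const rtabmap_ros::RGBDImageConstPtr & msg)
//   {
//     rtabmap::SensorData data = rtabmap_ros::rgbdImageFromROS(msg);
//     if(data.isValid())
//     {
//       ROS_INFO("Got frame %dx%d (stamp=%f)", data.imageRaw().cols, data.imageRaw().rows, data.stamp());
//     }
//   }
//   // ros::Subscriber sub = nh.subscribe("rgbd_image", 1, rgbdCallback);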
413 void compressedMatToBytes(const cv::Mat & compressed, std::vector<unsigned char> & bytes)
414 {
415  UASSERT(compressed.empty() || compressed.type() == CV_8UC1);
416  bytes.clear();
417  if(!compressed.empty())
418  {
419  bytes.resize(compressed.cols * compressed.rows);
420  memcpy(bytes.data(), compressed.data, bytes.size());
421  }
422 }
423 
424 cv::Mat compressedMatFromBytes(const std::vector<unsigned char> & bytes, bool copy)
425 {
426  cv::Mat out;
427  if(bytes.size())
428  {
429  out = cv::Mat(1, bytes.size(), CV_8UC1, (void*)bytes.data());
430  if(copy)
431  {
432  out = out.clone();
433  }
434  }
435  return out;
436 }
437 
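// --- Illustrative round-trip sketch (not part of the original file) ---
// Serializing an already-compressed cv::Mat into a plain byte vector and back
// with the two helpers above; "someMat" is a hypothetical input matrix.
//
//   cv::Mat compressed = rtabmap::compressData(someMat);                 // 1xN CV_8UC1
//   std::vector<unsigned char> bytes;
//   rtabmap_ros::compressedMatToBytes(compressed, bytes);
//   cv::Mat restored = rtabmap_ros::compressedMatFromBytes(bytes, true); // deep copy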
438 void infoFromROS(const rtabmap_ros::Info & info, rtabmap::Statistics & stat)
439 {
440  stat.setExtended(true); // Extended
441 
442  // rtabmap_ros::Info
443  stat.setRefImageId(info.refId);
444  stat.setLoopClosureId(info.loopClosureId);
445  stat.setProximityDetectionId(info.proximityDetectionId);
446  stat.setStamp(info.header.stamp.toSec());
447 
448  stat.setLoopClosureTransform(rtabmap_ros::transformFromGeometryMsg(info.loopClosureTransform));
449 
450  //wmState
451  stat.setWmState(info.wmState);
452 
453  //Posterior, likelihood, childCount
454  std::map<int, float> mapIntFloat;
455  for(unsigned int i=0; i<info.posteriorKeys.size() && i<info.posteriorValues.size(); ++i)
456  {
457  mapIntFloat.insert(std::pair<int, float>(info.posteriorKeys.at(i), info.posteriorValues.at(i)));
458  }
459  stat.setPosterior(mapIntFloat);
460 
461  mapIntFloat.clear();
462  for(unsigned int i=0; i<info.likelihoodKeys.size() && i<info.likelihoodValues.size(); ++i)
463  {
464  mapIntFloat.insert(std::pair<int, float>(info.likelihoodKeys.at(i), info.likelihoodValues.at(i)));
465  }
466  stat.setLikelihood(mapIntFloat);
467 
468  mapIntFloat.clear();
469  for(unsigned int i=0; i<info.rawLikelihoodKeys.size() && i<info.rawLikelihoodValues.size(); ++i)
470  {
471  mapIntFloat.insert(std::pair<int, float>(info.rawLikelihoodKeys.at(i), info.rawLikelihoodValues.at(i)));
472  }
473  stat.setRawLikelihood(mapIntFloat);
474 
475  std::map<int, int> mapIntInt;
476  for(unsigned int i=0; i<info.weightsKeys.size() && i<info.weightsValues.size(); ++i)
477  {
478  mapIntInt.insert(std::pair<int, int>(info.weightsKeys.at(i), info.weightsValues.at(i)));
479  }
480  stat.setWeights(mapIntInt);
481 
482  std::map<int, std::string> mapIntStr;
483  for(unsigned int i=0; i<info.labelsKeys.size() && i<info.labelsValues.size(); ++i)
484  {
485  mapIntStr.insert(std::pair<int, std::string>(info.labelsKeys.at(i), info.labelsValues.at(i)));
486  }
487  stat.setLabels(mapIntStr);
488 
489  stat.setLocalPath(info.localPath);
490  stat.setCurrentGoalId(info.currentGoalId);
491 
492  // Statistics data
493  for(unsigned int i=0; i<info.statsKeys.size() && i<info.statsValues.size(); i++)
494  {
495  stat.addStatistic(info.statsKeys.at(i), info.statsValues.at(i));
496  }
497 }
498 
499 void infoToROS(const rtabmap::Statistics & stats, rtabmap_ros::Info & info)
500 {
501  info.refId = stats.refImageId();
502  info.loopClosureId = stats.loopClosureId();
503  info.proximityDetectionId = stats.proximityDetectionId();
504  info.landmarkId = static_cast<int>(uValue(stats.data(), rtabmap::Statistics::kLoopLandmark_detected(), 0.0f));
505 
506  rtabmap_ros::transformToGeometryMsg(stats.loopClosureTransform(), info.loopClosureTransform);
507 
508  // Detailed info
509  if(stats.extended())
510  {
511  //wmState
512  info.wmState = stats.wmState();
513 
514  //Posterior, likelihood, childCount
515  info.posteriorKeys = uKeys(stats.posterior());
516  info.posteriorValues = uValues(stats.posterior());
517  info.likelihoodKeys = uKeys(stats.likelihood());
518  info.likelihoodValues = uValues(stats.likelihood());
519  info.rawLikelihoodKeys = uKeys(stats.rawLikelihood());
520  info.rawLikelihoodValues = uValues(stats.rawLikelihood());
521  info.weightsKeys = uKeys(stats.weights());
522  info.weightsValues = uValues(stats.weights());
523  info.labelsKeys = uKeys(stats.labels());
524  info.labelsValues = uValues(stats.labels());
525  info.localPath = stats.localPath();
526  info.currentGoalId = stats.currentGoalId();
527 
528  // Statistics data
529  info.statsKeys = uKeys(stats.data());
530  info.statsValues = uValues(stats.data());
531  }
532 }
533 
534 rtabmap::Link linkFromROS(const rtabmap_ros::Link & msg)
535 {
536  cv::Mat information = cv::Mat(6,6,CV_64FC1, (void*)msg.information.data()).clone();
537  return rtabmap::Link(msg.fromId, msg.toId, (rtabmap::Link::Type)msg.type, transformFromGeometryMsg(msg.transform), information);
538 }
539 
540 void linkToROS(const rtabmap::Link & link, rtabmap_ros::Link & msg)
541 {
542  msg.fromId = link.from();
543  msg.toId = link.to();
544  msg.type = link.type();
545  if(link.infMatrix().type() == CV_64FC1 && link.infMatrix().cols == 6 && link.infMatrix().rows == 6)
546  {
547  memcpy(msg.information.data(), link.infMatrix().data, 36*sizeof(double));
548  }
549  transformToGeometryMsg(link.transform(), msg.transform);
550 }
551 
552 cv::KeyPoint keypointFromROS(const rtabmap_ros::KeyPoint & msg)
553 {
554  return cv::KeyPoint(msg.pt.x, msg.pt.y, msg.size, msg.angle, msg.response, msg.octave, msg.class_id);
555 }
556 
557 void keypointToROS(const cv::KeyPoint & kpt, rtabmap_ros::KeyPoint & msg)
558 {
559  msg.angle = kpt.angle;
560  msg.class_id = kpt.class_id;
561  msg.octave = kpt.octave;
562  msg.pt.x = kpt.pt.x;
563  msg.pt.y = kpt.pt.y;
564  msg.response = kpt.response;
565  msg.size = kpt.size;
566 }
567 
568 std::vector<cv::KeyPoint> keypointsFromROS(const std::vector<rtabmap_ros::KeyPoint> & msg)
569 {
570  std::vector<cv::KeyPoint> v(msg.size());
571  for(unsigned int i=0; i<msg.size(); ++i)
572  {
573  v[i] = keypointFromROS(msg[i]);
574  }
575  return v;
576 }
577 
578 void keypointsFromROS(const std::vector<rtabmap_ros::KeyPoint> & msg, std::vector<cv::KeyPoint> & kpts, int xShift)
579 {
580  size_t outCurrentIndex = kpts.size();
581  kpts.resize(kpts.size()+msg.size());
582  for(unsigned int i=0; i<msg.size(); ++i)
583  {
584  kpts[outCurrentIndex+i] = keypointFromROS(msg[i]);
585  kpts[outCurrentIndex+i].pt.x += xShift;
586  }
587 }
588 
589 void keypointsToROS(const std::vector<cv::KeyPoint> & kpts, std::vector<rtabmap_ros::KeyPoint> & msg)
590 {
591  msg.resize(kpts.size());
592  for(unsigned int i=0; i<msg.size(); ++i)
593  {
594  keypointToROS(kpts[i], msg[i]);
595  }
596 }
597 
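// --- Illustrative sketch (not part of the original file) ---
// Converting OpenCV keypoints to the rtabmap_ros message type and back with
// the helpers above; "kpts" stands in for the output of any feature detector.
//
//   std::vector<cv::KeyPoint> kpts;                    // e.g. filled by a cv::Feature2D detector
//   std::vector<rtabmap_ros::KeyPoint> kptsMsg;
//   rtabmap_ros::keypointsToROS(kpts, kptsMsg);
//   std::vector<cv::KeyPoint> restored = rtabmap_ros::keypointsFromROS(kptsMsg);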
598 rtabmap::GlobalDescriptor globalDescriptorFromROS(const rtabmap_ros::GlobalDescriptor & msg)
599 {
600  return rtabmap::GlobalDescriptor(msg.type, rtabmap::uncompressData(msg.data), rtabmap::uncompressData(msg.info));
601 }
602 
603 void globalDescriptorToROS(const rtabmap::GlobalDescriptor & desc, rtabmap_ros::GlobalDescriptor & msg)
604 {
605  msg.type = desc.type();
606  msg.info = rtabmap::compressData(desc.info());
607  msg.data = rtabmap::compressData(desc.data());
608 }
609 
610 std::vector<rtabmap::GlobalDescriptor> globalDescriptorsFromROS(const std::vector<rtabmap_ros::GlobalDescriptor> & msg)
611 {
612  if(!msg.empty())
613  {
614  std::vector<rtabmap::GlobalDescriptor> v(msg.size());
615  for(unsigned int i=0; i<msg.size(); ++i)
616  {
617  v[i] = globalDescriptorFromROS(msg[i]);
618  }
619  return v;
620  }
621  return std::vector<rtabmap::GlobalDescriptor>();
622 }
623 
624 void globalDescriptorsToROS(const std::vector<rtabmap::GlobalDescriptor> & desc, std::vector<rtabmap_ros::GlobalDescriptor> & msg)
625 {
626  msg.clear();
627  if(!desc.empty())
628  {
629  msg.resize(desc.size());
630  for(unsigned int i=0; i<msg.size(); ++i)
631  {
632  globalDescriptorToROS(desc[i], msg[i]);
633  }
634  }
635 }
636 
637 rtabmap::EnvSensor envSensorFromROS(const rtabmap_ros::EnvSensor & msg)
638 {
639  return rtabmap::EnvSensor((rtabmap::EnvSensor::Type)msg.type, msg.value, timestampFromROS(msg.header.stamp));
640 }
641 
642 void envSensorToROS(const rtabmap::EnvSensor & sensor, rtabmap_ros::EnvSensor & msg)
643 {
644  msg.type = sensor.type();
645  msg.value = sensor.value();
646  msg.header.stamp = ros::Time(sensor.stamp());
647 }
648 
649 rtabmap::EnvSensors envSensorsFromROS(const std::vector<rtabmap_ros::EnvSensor> & msg)
650 {
651  rtabmap::EnvSensors v;
652  if(!msg.empty())
653  {
654  for(unsigned int i=0; i<msg.size(); ++i)
655  {
656  rtabmap::EnvSensor s = envSensorFromROS(msg[i]);
657  v.insert(std::make_pair(s.type(), envSensorFromROS(msg[i])));
658  }
659  }
660  return v;
661 }
662 
663 void envSensorsToROS(const rtabmap::EnvSensors & sensors, std::vector<rtabmap_ros::EnvSensor> & msg)
664 {
665  msg.clear();
666  if(!sensors.empty())
667  {
668  msg.resize(sensors.size());
669  int i=0;
670  for(rtabmap::EnvSensors::const_iterator iter=sensors.begin(); iter!=sensors.end(); ++iter)
671  {
672  envSensorToROS(iter->second, msg[i++]);
673  }
674  }
675 }
676 
677 cv::Point2f point2fFromROS(const rtabmap_ros::Point2f & msg)
678 {
679  return cv::Point2f(msg.x, msg.y);
680 }
681 
682 void point2fToROS(const cv::Point2f & kpt, rtabmap_ros::Point2f & msg)
683 {
684  msg.x = kpt.x;
685  msg.y = kpt.y;
686 }
687 
688 std::vector<cv::Point2f> points2fFromROS(const std::vector<rtabmap_ros::Point2f> & msg)
689 {
690  std::vector<cv::Point2f> v(msg.size());
691  for(unsigned int i=0; i<msg.size(); ++i)
692  {
693  v[i] = point2fFromROS(msg[i]);
694  }
695  return v;
696 }
697 
698 void points2fToROS(const std::vector<cv::Point2f> & kpts, std::vector<rtabmap_ros::Point2f> & msg)
699 {
700  msg.resize(kpts.size());
701  for(unsigned int i=0; i<msg.size(); ++i)
702  {
703  point2fToROS(kpts[i], msg[i]);
704  }
705 }
706 
707 cv::Point3f point3fFromROS(const rtabmap_ros::Point3f & msg)
708 {
709  return cv::Point3f(msg.x, msg.y, msg.z);
710 }
711 
712 void point3fToROS(const cv::Point3f & pt, rtabmap_ros::Point3f & msg)
713 {
714  msg.x = pt.x;
715  msg.y = pt.y;
716  msg.z = pt.z;
717 }
718 
719 std::vector<cv::Point3f> points3fFromROS(const std::vector<rtabmap_ros::Point3f> & msg, const rtabmap::Transform & transform)
720 {
721  bool transformPoints = !transform.isNull() && !transform.isIdentity();
722  std::vector<cv::Point3f> v(msg.size());
723  for(unsigned int i=0; i<msg.size(); ++i)
724  {
725  v[i] = point3fFromROS(msg[i]);
726  if(transformPoints)
727  {
728  v[i] = rtabmap::util3d::transformPoint(v[i], transform);
729  }
730  }
731  return v;
732 }
733 
734 void points3fFromROS(const std::vector<rtabmap_ros::Point3f> & msg, std::vector<cv::Point3f> & points3, const rtabmap::Transform & transform)
735 {
736  size_t currentIndex = points3.size();
737  points3.resize(points3.size()+msg.size());
738  bool transformPoint = !transform.isNull() && !transform.isIdentity();
739  for(unsigned int i=0; i<msg.size(); ++i)
740  {
741  points3[currentIndex+i] = point3fFromROS(msg[i]);
742  if(transformPoint)
743  {
744  points3[currentIndex+i] = rtabmap::util3d::transformPoint(points3[currentIndex+i], transform);
745  }
746  }
747 }
748 
749 void points3fToROS(const std::vector<cv::Point3f> & pts, std::vector<rtabmap_ros::Point3f> & msg, const rtabmap::Transform & transform)
750 {
751  msg.resize(pts.size());
752  bool transformPoints = !transform.isNull() && !transform.isIdentity();
753  for(unsigned int i=0; i<msg.size(); ++i)
754  {
755  if(transformPoints)
756  {
757  cv::Point3f pt = rtabmap::util3d::transformPoint(pts[i], transform);
758  point3fToROS(pt, msg[i]);
759  }
760  else
761  {
762  point3fToROS(pts[i], msg[i]);
763  }
764  }
765 }
766 
767 rtabmap::CameraModel cameraModelFromROS(
768  const sensor_msgs::CameraInfo & camInfo,
769  const rtabmap::Transform & localTransform)
770 {
771  cv::Mat K;
772  UASSERT(camInfo.K.empty() || camInfo.K.size() == 9);
773  if(!camInfo.K.empty())
774  {
775  K = cv::Mat(3, 3, CV_64FC1);
776  memcpy(K.data, camInfo.K.elems, 9*sizeof(double));
777  }
778 
779  cv::Mat D;
780  if(camInfo.D.size())
781  {
782  if(camInfo.D.size()>=4 &&
783  (uStrContains(camInfo.distortion_model, "fisheye") ||
784  uStrContains(camInfo.distortion_model, "equidistant")))
785  {
786  D = cv::Mat::zeros(1, 6, CV_64FC1);
787  D.at<double>(0,0) = camInfo.D[0];
788  D.at<double>(0,1) = camInfo.D[1];
789  D.at<double>(0,4) = camInfo.D[2];
790  D.at<double>(0,5) = camInfo.D[3];
791  }
792  else
793  {
794  D = cv::Mat(1, camInfo.D.size(), CV_64FC1);
795  memcpy(D.data, camInfo.D.data(), D.cols*sizeof(double));
796  }
797  }
798 
799  cv::Mat R;
800  UASSERT(camInfo.R.empty() || camInfo.R.size() == 9);
801  if(!camInfo.R.empty())
802  {
803  R = cv::Mat(3, 3, CV_64FC1);
804  memcpy(R.data, camInfo.R.elems, 9*sizeof(double));
805  }
806 
807  cv::Mat P;
808  UASSERT(camInfo.P.empty() || camInfo.P.size() == 12);
809  if(!camInfo.P.empty())
810  {
811  P = cv::Mat(3, 4, CV_64FC1);
812  memcpy(P.data, camInfo.P.elems, 12*sizeof(double));
813  }
814 
815  return rtabmap::CameraModel(
816  "ros",
817  cv::Size(camInfo.width, camInfo.height),
818  K, D, R, P,
819  localTransform);
820 }
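// --- Illustrative usage sketch (not part of the original file) ---
// Building a rtabmap::CameraModel from a sensor_msgs::CameraInfo callback. The
// local transform shown is the usual base-to-optical rotation and is only an
// example value; the callback name is an assumption.
//
//   void cameraInfoCallback(const sensor_msgs::CameraInfoConstPtr & info)
//   {
//     rtabmap::Transform opticalRotation(0,0,1,0, -1,0,0,0, 0,-1,0,0);
//     rtabmap::CameraModel model = rtabmap_ros::cameraModelFromROS(*info, opticalRotation);
//     ROS_INFO("fx=%f fy=%f cx=%f cy=%f", model.fx(), model.fy(), model.cx(), model.cy());
//   }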
821 void cameraModelToROS(
822  const rtabmap::CameraModel & model,
823  sensor_msgs::CameraInfo & camInfo)
824 {
825  UASSERT(model.K_raw().empty() || model.K_raw().total() == 9);
826  if(model.K_raw().empty())
827  {
828  memset(camInfo.K.elems, 0.0, 9*sizeof(double));
829  }
830  else
831  {
832  memcpy(camInfo.K.elems, model.K_raw().data, 9*sizeof(double));
833  }
834 
835  if(model.D_raw().total() == 6)
836  {
837  camInfo.D = std::vector<double>(4);
838  camInfo.D[0] = model.D_raw().at<double>(0,0);
839  camInfo.D[1] = model.D_raw().at<double>(0,1);
840  camInfo.D[2] = model.D_raw().at<double>(0,4);
841  camInfo.D[3] = model.D_raw().at<double>(0,5);
842  camInfo.distortion_model = "equidistant"; // fisheye
843  }
844  else
845  {
846  camInfo.D = std::vector<double>(model.D_raw().cols);
847  memcpy(camInfo.D.data(), model.D_raw().data, model.D_raw().cols*sizeof(double));
848  if(camInfo.D.size() > 5)
849  {
850  camInfo.distortion_model = "rational_polynomial";
851  }
852  else
853  {
854  camInfo.distortion_model = "plumb_bob";
855  }
856  }
857 
858  UASSERT(model.R().empty() || model.R().total() == 9);
859  if(model.R().empty())
860  {
861  memset(camInfo.R.elems, 0.0, 9*sizeof(double));
862  }
863  else
864  {
865  memcpy(camInfo.R.elems, model.R().data, 9*sizeof(double));
866  }
867 
868  UASSERT(model.P().empty() || model.P().total() == 12);
869  if(model.P().empty())
870  {
871  memset(camInfo.P.elems, 0.0, 12*sizeof(double));
872  }
873  else
874  {
875  memcpy(camInfo.P.elems, model.P().data, 12*sizeof(double));
876  }
877 
878  camInfo.binning_x = 1;
879  camInfo.binning_y = 1;
880  camInfo.roi.width = model.imageWidth();
881  camInfo.roi.height = model.imageHeight();
882 
883  camInfo.width = model.imageWidth();
884  camInfo.height = model.imageHeight();
885 }
886 rtabmap::StereoCameraModel stereoCameraModelFromROS(
887  const sensor_msgs::CameraInfo & leftCamInfo,
888  const sensor_msgs::CameraInfo & rightCamInfo,
889  const rtabmap::Transform & localTransform,
890  const rtabmap::Transform & stereoTransform)
891 {
892  return rtabmap::StereoCameraModel(
893  "ros",
894  cameraModelFromROS(leftCamInfo, localTransform),
895  cameraModelFromROS(rightCamInfo, localTransform),
896  stereoTransform);
897 }
898 rtabmap::StereoCameraModel stereoCameraModelFromROS(
899  const sensor_msgs::CameraInfo & leftCamInfo,
900  const sensor_msgs::CameraInfo & rightCamInfo,
901  const std::string & frameId,
902  tf::TransformListener & listener,
903  double waitForTransform)
904 {
905  rtabmap::Transform localTransform = getTransform(
906  frameId,
907  leftCamInfo.header.frame_id,
908  leftCamInfo.header.stamp,
909  listener,
910  waitForTransform);
911  if(localTransform.isNull())
912  {
913  return rtabmap::StereoCameraModel();
914  }
915 
916  rtabmap::Transform stereoTransform = getTransform(
917  leftCamInfo.header.frame_id,
918  rightCamInfo.header.frame_id,
919  leftCamInfo.header.stamp,
920  listener,
921  waitForTransform);
922  if(stereoTransform.isNull())
923  {
924  return rtabmap::StereoCameraModel();
925  }
926  return stereoCameraModelFromROS(leftCamInfo, rightCamInfo, localTransform, stereoTransform);
927 }
928 
929 void mapDataFromROS(
930  const rtabmap_ros::MapData & msg,
931  std::map<int, rtabmap::Transform> & poses,
932  std::multimap<int, rtabmap::Link> & links,
933  std::map<int, rtabmap::Signature> & signatures,
934  rtabmap::Transform & mapToOdom)
935 {
936  //optimized graph
937  mapGraphFromROS(msg.graph, poses, links, mapToOdom);
938 
939  //Data
940  for(unsigned int i=0; i<msg.nodes.size(); ++i)
941  {
942  signatures.insert(std::make_pair(msg.nodes[i].id, nodeDataFromROS(msg.nodes[i])));
943  }
944 }
945 void mapDataToROS(
946  const std::map<int, rtabmap::Transform> & poses,
947  const std::multimap<int, rtabmap::Link> & links,
948  const std::map<int, rtabmap::Signature> & signatures,
949  const rtabmap::Transform & mapToOdom,
950  rtabmap_ros::MapData & msg)
951 {
952  //Optimized graph
953  mapGraphToROS(poses, links, mapToOdom, msg.graph);
954 
955  //Data
956  msg.nodes.resize(signatures.size());
957  int index=0;
958  for(std::map<int, rtabmap::Signature>::const_iterator iter = signatures.begin();
959  iter!=signatures.end();
960  ++iter)
961  {
962  nodeDataToROS(iter->second, msg.nodes[index++]);
963  }
964 }
965 
966 void mapGraphFromROS(
967  const rtabmap_ros::MapGraph & msg,
968  std::map<int, rtabmap::Transform> & poses,
969  std::multimap<int, rtabmap::Link> & links,
970  rtabmap::Transform & mapToOdom)
971 {
972  //optimized graph
973  UASSERT(msg.posesId.size() == msg.poses.size());
974  for(unsigned int i=0; i<msg.posesId.size(); ++i)
975  {
976  poses.insert(std::make_pair(msg.posesId[i], rtabmap_ros::transformFromPoseMsg(msg.poses[i])));
977  }
978  for(unsigned int i=0; i<msg.links.size(); ++i)
979  {
981  links.insert(std::make_pair(msg.links[i].fromId, linkFromROS(msg.links[i])));
982  }
983  mapToOdom = transformFromGeometryMsg(msg.mapToOdom);
984 }
985 void mapGraphToROS(
986  const std::map<int, rtabmap::Transform> & poses,
987  const std::multimap<int, rtabmap::Link> & links,
988  const rtabmap::Transform & mapToOdom,
989  rtabmap_ros::MapGraph & msg)
990 {
991  //Optimized graph
992  msg.posesId.resize(poses.size());
993  msg.poses.resize(poses.size());
994  int index = 0;
995  for(std::map<int, rtabmap::Transform>::const_iterator iter = poses.begin();
996  iter != poses.end();
997  ++iter)
998  {
999  msg.posesId[index] = iter->first;
1000  transformToPoseMsg(iter->second, msg.poses[index]);
1001  ++index;
1002  }
1003 
1004  msg.links.resize(links.size());
1005  index=0;
1006  for(std::multimap<int, rtabmap::Link>::const_iterator iter = links.begin();
1007  iter!=links.end();
1008  ++iter)
1009  {
1010  linkToROS(iter->second, msg.links[index++]);
1011  }
1012 
1013  transformToGeometryMsg(mapToOdom, msg.mapToOdom);
1014 }
1015 
1016 rtabmap::Signature nodeDataFromROS(const rtabmap_ros::NodeData & msg)
1017 {
1018  //Features stuff...
1019  std::multimap<int, int> words;
1020  std::vector<cv::KeyPoint> wordsKpts;
1021  std::vector<cv::Point3f> words3D;
1022  cv::Mat wordsDescriptors = rtabmap::uncompressData(msg.wordDescriptors);
1023 
1024  if(!msg.wordKpts.empty() && msg.wordKpts.size() != msg.wordIds.size())
1025  {
1026  ROS_ERROR("Word IDs and 2D keypoints should be the same size (%d, %d)!", (int)msg.wordIds.size(), (int)msg.wordKpts.size());
1027  }
1028  if(!msg.wordPts.empty() && msg.wordPts.size() != msg.wordIds.size())
1029  {
1030  ROS_ERROR("Word IDs and 3D points should be the same size (%d, %d)!", (int)msg.wordIds.size(), (int)msg.wordPts.size());
1031  }
1032  if(wordsDescriptors.rows != (int)msg.wordIds.size())
1033  {
1034  ROS_ERROR("Word IDs and descriptors should be the same size (%d, %d)!", (int)msg.wordIds.size(), wordsDescriptors.rows);
1035  wordsDescriptors = cv::Mat();
1036  }
1037 
1038  for(unsigned int i=0; i<msg.wordIds.size(); ++i)
1039  {
1040  words.insert(std::make_pair(msg.wordIds.at(i), words.size())); // ID to index
1041  if(msg.wordIds.size() == msg.wordKpts.size())
1042  {
1043  cv::KeyPoint pt = keypointFromROS(msg.wordKpts.at(i));
1044  wordsKpts.push_back(pt);
1045  }
1046  if(msg.wordIds.size() == msg.wordPts.size())
1047  {
1048  words3D.push_back(point3fFromROS(msg.wordPts[i]));
1049  }
1050  }
1051 
1052  rtabmap::StereoCameraModel stereoModel;
1053  std::vector<rtabmap::CameraModel> models;
1054  if(msg.baseline > 0.0f)
1055  {
1056  // stereo model
1057  if(msg.fx.size() == 1 &&
1058  msg.fy.size() == 1 &&
1059  msg.cx.size() == 1 &&
1060  msg.cy.size() == 1 &&
1061  msg.width.size() == 1 &&
1062  msg.height.size() == 1 &&
1063  msg.localTransform.size() == 1)
1064  {
1065  stereoModel = rtabmap::StereoCameraModel(
1066  msg.fx[0],
1067  msg.fy[0],
1068  msg.cx[0],
1069  msg.cy[0],
1070  msg.baseline,
1071  transformFromGeometryMsg(msg.localTransform[0]),
1072  cv::Size(msg.width[0], msg.height[0]));
1073  }
1074  }
1075  else
1076  {
1077  // multi-cameras model
1078  if(msg.fx.size() &&
1079  msg.fx.size() == msg.fy.size() &&
1080  msg.fx.size() == msg.cx.size() &&
1081  msg.fx.size() == msg.cy.size() &&
1082  msg.fx.size() == msg.localTransform.size())
1083  {
1084  for(unsigned int i=0; i<msg.fx.size(); ++i)
1085  {
1086  if(msg.fx[i] == 0)
1087  {
1088  models.push_back(rtabmap::CameraModel());
1089  }
1090  else
1091  {
1092  models.push_back(rtabmap::CameraModel(
1093  msg.fx[i],
1094  msg.fy[i],
1095  msg.cx[i],
1096  msg.cy[i],
1097  transformFromGeometryMsg(msg.localTransform[i]),
1098  0.0,
1099  cv::Size(msg.width[i], msg.height[i])));
1100  }
1101  }
1102  }
1103  }
1104 
1105  rtabmap::Signature s(
1106  msg.id,
1107  msg.mapId,
1108  msg.weight,
1109  msg.stamp,
1110  msg.label,
1111  transformFromPoseMsg(msg.pose),
1112  transformFromPoseMsg(msg.groundTruthPose),
1113  stereoModel.isValidForProjection()?
1114  rtabmap::SensorData(
1115  rtabmap::LaserScan(compressedMatFromBytes(msg.laserScan),
1116  msg.laserScanMaxPts,
1117  msg.laserScanMaxRange,
1118  (rtabmap::LaserScan::Format)msg.laserScanFormat,
1119  transformFromGeometryMsg(msg.laserScanLocalTransform)),
1120  compressedMatFromBytes(msg.image),
1121  compressedMatFromBytes(msg.depth),
1122  stereoModel,
1123  msg.id,
1124  msg.stamp,
1125  compressedMatFromBytes(msg.userData)):
1126  rtabmap::SensorData(
1127  rtabmap::LaserScan(compressedMatFromBytes(msg.laserScan),
1128  msg.laserScanMaxPts,
1129  msg.laserScanMaxRange,
1130  (rtabmap::LaserScan::Format)msg.laserScanFormat,
1131  transformFromGeometryMsg(msg.laserScanLocalTransform)),
1132  compressedMatFromBytes(msg.image),
1133  compressedMatFromBytes(msg.depth),
1134  models,
1135  msg.id,
1136  msg.stamp,
1137  compressedMatFromBytes(msg.userData)));
1138  s.setWords(words, wordsKpts, words3D, wordsDescriptors);
1139  s.sensorData().setGlobalDescriptors(rtabmap_ros::globalDescriptorsFromROS(msg.globalDescriptors));
1140  s.sensorData().setEnvSensors(rtabmap_ros::envSensorsFromROS(msg.env_sensors));
1141  s.sensorData().setOccupancyGrid(
1142  compressedMatFromBytes(msg.grid_ground),
1143  compressedMatFromBytes(msg.grid_obstacles),
1144  compressedMatFromBytes(msg.grid_empty_cells),
1145  msg.grid_cell_size,
1146  point3fFromROS(msg.grid_view_point));
1147  s.sensorData().setGPS(rtabmap::GPS(msg.gps.stamp, msg.gps.longitude, msg.gps.latitude, msg.gps.altitude, msg.gps.error, msg.gps.bearing));
1148  return s;
1149 }
1150 void nodeDataToROS(const rtabmap::Signature & signature, rtabmap_ros::NodeData & msg)
1151 {
1152  // add data
1153  msg.id = signature.id();
1154  msg.mapId = signature.mapId();
1155  msg.weight = signature.getWeight();
1156  msg.stamp = signature.getStamp();
1157  msg.label = signature.getLabel();
1158  transformToPoseMsg(signature.getPose(), msg.pose);
1159  transformToPoseMsg(signature.getGroundTruthPose(), msg.groundTruthPose);
1160  msg.gps.stamp = signature.sensorData().gps().stamp();
1161  msg.gps.longitude = signature.sensorData().gps().longitude();
1162  msg.gps.latitude = signature.sensorData().gps().latitude();
1163  msg.gps.altitude = signature.sensorData().gps().altitude();
1164  msg.gps.error = signature.sensorData().gps().error();
1165  msg.gps.bearing = signature.sensorData().gps().bearing();
1166  compressedMatToBytes(signature.sensorData().imageCompressed(), msg.image);
1167  compressedMatToBytes(signature.sensorData().depthOrRightCompressed(), msg.depth);
1168  compressedMatToBytes(signature.sensorData().laserScanCompressed().data(), msg.laserScan);
1169  compressedMatToBytes(signature.sensorData().userDataCompressed(), msg.userData);
1170  compressedMatToBytes(signature.sensorData().gridGroundCellsCompressed(), msg.grid_ground);
1171  compressedMatToBytes(signature.sensorData().gridObstacleCellsCompressed(), msg.grid_obstacles);
1172  compressedMatToBytes(signature.sensorData().gridEmptyCellsCompressed(), msg.grid_empty_cells);
1173  point3fToROS(signature.sensorData().gridViewPoint(), msg.grid_view_point);
1174  msg.grid_cell_size = signature.sensorData().gridCellSize();
1175  msg.laserScanMaxPts = signature.sensorData().laserScanCompressed().maxPoints();
1176  msg.laserScanMaxRange = signature.sensorData().laserScanCompressed().rangeMax();
1177  msg.laserScanFormat = signature.sensorData().laserScanCompressed().format();
1178  transformToGeometryMsg(signature.sensorData().laserScanCompressed().localTransform(), msg.laserScanLocalTransform);
1179  msg.baseline = 0;
1180  if(signature.sensorData().cameraModels().size())
1181  {
1182  msg.fx.resize(signature.sensorData().cameraModels().size());
1183  msg.fy.resize(signature.sensorData().cameraModels().size());
1184  msg.cx.resize(signature.sensorData().cameraModels().size());
1185  msg.cy.resize(signature.sensorData().cameraModels().size());
1186  msg.width.resize(signature.sensorData().cameraModels().size());
1187  msg.height.resize(signature.sensorData().cameraModels().size());
1188  msg.localTransform.resize(signature.sensorData().cameraModels().size());
1189  for(unsigned int i=0; i<signature.sensorData().cameraModels().size(); ++i)
1190  {
1191  msg.fx[i] = signature.sensorData().cameraModels()[i].fx();
1192  msg.fy[i] = signature.sensorData().cameraModels()[i].fy();
1193  msg.cx[i] = signature.sensorData().cameraModels()[i].cx();
1194  msg.cy[i] = signature.sensorData().cameraModels()[i].cy();
1195  msg.width[i] = signature.sensorData().cameraModels()[i].imageWidth();
1196  msg.height[i] = signature.sensorData().cameraModels()[i].imageHeight();
1197  transformToGeometryMsg(signature.sensorData().cameraModels()[i].localTransform(), msg.localTransform[i]);
1198  }
1199  }
1200  else if(signature.sensorData().stereoCameraModel().isValidForProjection())
1201  {
1202  msg.fx.push_back(signature.sensorData().stereoCameraModel().left().fx());
1203  msg.fy.push_back(signature.sensorData().stereoCameraModel().left().fy());
1204  msg.cx.push_back(signature.sensorData().stereoCameraModel().left().cx());
1205  msg.cy.push_back(signature.sensorData().stereoCameraModel().left().cy());
1206  msg.width.push_back(signature.sensorData().stereoCameraModel().left().imageWidth());
1207  msg.height.push_back(signature.sensorData().stereoCameraModel().left().imageHeight());
1208  msg.baseline = signature.sensorData().stereoCameraModel().baseline();
1209  msg.localTransform.resize(1);
1210  transformToGeometryMsg(signature.sensorData().stereoCameraModel().left().localTransform(), msg.localTransform[0]);
1211  }
1212 
1213  //Features stuff...
1214  msg.wordIds = uKeys(signature.getWords());
1215  if(!signature.getWordsKpts().empty())
1216  {
1217  if(msg.wordIds.size() == signature.getWordsKpts().size())
1218  {
1219  msg.wordKpts.resize(signature.getWordsKpts().size());
1220  }
1221  else
1222  {
1223  ROS_ERROR("Word IDs and 2D keypoints must have the same size (%d vs %d)!",
1224  (int)signature.getWords().size(),
1225  (int)signature.getWordsKpts().size());
1226  }
1227  }
1228 
1229  if(!signature.getWords3().empty())
1230  {
1231  if(msg.wordIds.size() == signature.getWords3().size())
1232  {
1233  msg.wordPts.resize(signature.getWords3().size());
1234  }
1235  else
1236  {
1237  ROS_ERROR("Word IDs and 3D points must have the same size (%d vs %d)!",
1238  (int)signature.getWords().size(),
1239  (int)signature.getWords3().size());
1240  }
1241  }
1242  if(!msg.wordKpts.empty() || !msg.wordPts.empty())
1243  {
1244  for(size_t i=0; i<msg.wordIds.size(); ++i)
1245  {
1246  if(!msg.wordKpts.empty())
1247  keypointToROS(signature.getWordsKpts().at(i), msg.wordKpts.at(i));
1248  if(!msg.wordPts.empty())
1249  point3fToROS(signature.getWords3().at(i), msg.wordPts[i]);
1250  }
1251  }
1252 
1253  if(!signature.getWordsDescriptors().empty())
1254  {
1255  if(signature.getWordsDescriptors().rows == (int)signature.getWords().size())
1256  {
1257  msg.wordDescriptors = rtabmap::compressData(signature.getWordsDescriptors());
1258  }
1259  else
1260  {
1261  ROS_ERROR("Word IDs and descriptors must have the same size (%d vs %d)!",
1262  (int)signature.getWords().size(),
1263  signature.getWordsDescriptors().rows);
1264  }
1265  }
1266 
1267  rtabmap_ros::globalDescriptorsToROS(signature.sensorData().globalDescriptors(), msg.globalDescriptors);
1268  rtabmap_ros::envSensorsToROS(signature.sensorData().envSensors(), msg.env_sensors);
1269 }
1270 
1271 rtabmap::Signature nodeInfoFromROS(const rtabmap_ros::NodeData & msg)
1272 {
1273  rtabmap::Signature s(
1274  msg.id,
1275  msg.mapId,
1276  msg.weight,
1277  msg.stamp,
1278  msg.label,
1279  transformFromPoseMsg(msg.pose),
1280  transformFromPoseMsg(msg.groundTruthPose));
1281  return s;
1282 }
1283 void nodeInfoToROS(const rtabmap::Signature & signature, rtabmap_ros::NodeData & msg)
1284 {
1285  // add data
1286  msg.id = signature.id();
1287  msg.mapId = signature.mapId();
1288  msg.weight = signature.getWeight();
1289  msg.stamp = signature.getStamp();
1290  msg.label = signature.getLabel();
1291  transformToPoseMsg(signature.getPose(), msg.pose);
1292  transformToPoseMsg(signature.getGroundTruthPose(), msg.groundTruthPose);
1293 }
1294 
1295 std::map<std::string, float> odomInfoToStatistics(const rtabmap::OdometryInfo & info)
1296 {
1297  std::map<std::string, float> stats;
1298 
1299  stats.insert(std::make_pair("Odometry/TimeRegistration/ms", info.reg.totalTime*1000.0f));
1300  stats.insert(std::make_pair("Odometry/RAM_usage/MB", info.memoryUsage));
1301 
1302  // Based on rtabmap/MainWindow.cpp
1303  stats.insert(std::make_pair("Odometry/Features/", info.features));
1304  stats.insert(std::make_pair("Odometry/Matches/", info.reg.matches));
1305  stats.insert(std::make_pair("Odometry/MatchesRatio/", info.features<=0?0.0f:float(info.reg.inliers)/float(info.features)));
1306  stats.insert(std::make_pair("Odometry/Inliers/", info.reg.inliers));
1307  stats.insert(std::make_pair("Odometry/InliersMeanDistance/m", info.reg.inliersMeanDistance));
1308  stats.insert(std::make_pair("Odometry/InliersDistribution/", info.reg.inliersDistribution));
1309  stats.insert(std::make_pair("Odometry/InliersRatio/", info.reg.inliers));
1310  stats.insert(std::make_pair("Odometry/ICPInliersRatio/", info.reg.icpInliersRatio));
1311  stats.insert(std::make_pair("Odometry/ICPRotation/rad", info.reg.icpRotation));
1312  stats.insert(std::make_pair("Odometry/ICPTranslation/m", info.reg.icpTranslation));
1313  stats.insert(std::make_pair("Odometry/ICPStructuralComplexity/", info.reg.icpStructuralComplexity));
1314  stats.insert(std::make_pair("Odometry/ICPStructuralDistribution/", info.reg.icpStructuralDistribution));
1315  stats.insert(std::make_pair("Odometry/ICPCorrespondences/", info.reg.icpCorrespondences));
1316  stats.insert(std::make_pair("Odometry/StdDevLin/", sqrt((float)info.reg.covariance.at<double>(0,0))));
1317  stats.insert(std::make_pair("Odometry/StdDevAng/", sqrt((float)info.reg.covariance.at<double>(5,5))));
1318  stats.insert(std::make_pair("Odometry/VarianceLin/", (float)info.reg.covariance.at<double>(0,0)));
1319  stats.insert(std::make_pair("Odometry/VarianceAng/", (float)info.reg.covariance.at<double>(5,5)));
1320  stats.insert(std::make_pair("Odometry/TimeEstimation/ms", info.timeEstimation*1000.0f));
1321  stats.insert(std::make_pair("Odometry/TimeFiltering/ms", info.timeParticleFiltering*1000.0f));
1322  stats.insert(std::make_pair("Odometry/LocalMapSize/", info.localMapSize));
1323  stats.insert(std::make_pair("Odometry/LocalScanMapSize/", info.localScanMapSize));
1324  stats.insert(std::make_pair("Odometry/LocalKeyFrames/", info.localKeyFrames));
1325  stats.insert(std::make_pair("Odometry/LocalBundleOutliers/", info.localBundleOutliers));
1326  stats.insert(std::make_pair("Odometry/LocalBundleConstraints/", info.localBundleConstraints));
1327  stats.insert(std::make_pair("Odometry/LocalBundleTime/ms", info.localBundleTime*1000.0f));
1328  stats.insert(std::make_pair("Odometry/KeyFrameAdded/", info.keyFrameAdded?1.0f:0.0f));
1329  stats.insert(std::make_pair("Odometry/Interval/ms", (float)info.interval));
1330  stats.insert(std::make_pair("Odometry/Distance/m", info.distanceTravelled));
1331 
1332  float x,y,z,roll,pitch,yaw;
1333  float dist = 0.0f, speed=0.0f;
1334  if(!info.transform.isNull())
1335  {
1336  info.transform.getTranslationAndEulerAngles(x,y,z,roll,pitch,yaw);
1337  dist = info.transform.getNorm();
1338  stats.insert(std::make_pair("Odometry/T/m", dist));
1339  stats.insert(std::make_pair("Odometry/Tx/m", x));
1340  stats.insert(std::make_pair("Odometry/Ty/m", y));
1341  stats.insert(std::make_pair("Odometry/Tz/m", z));
1342  stats.insert(std::make_pair("Odometry/Troll/deg", roll*180.0/CV_PI));
1343  stats.insert(std::make_pair("Odometry/Tpitch/deg", pitch*180.0/CV_PI));
1344  stats.insert(std::make_pair("Odometry/Tyaw/deg", yaw*180.0/CV_PI));
1345 
1346  if(info.interval>0.0)
1347  {
1348  speed = dist/info.interval;
1349  stats.insert(std::make_pair("Odometry/Speed/kph", speed*3.6));
1350  stats.insert(std::make_pair("Odometry/Speed/mph", speed*2.237));
1351  stats.insert(std::make_pair("Odometry/Speed/mps", speed));
1352  }
1353  }
1354  if(!info.transformGroundTruth.isNull())
1355  {
1356  if(!info.transform.isNull())
1357  {
1358  rtabmap::Transform diff = info.transformGroundTruth.inverse() * info.transform;
1359  stats.insert(std::make_pair("Odometry/TG_error_lin/m", diff.getNorm()));
1360  stats.insert(std::make_pair("Odometry/TG_error_ang/deg", diff.getAngle()*180.0/CV_PI));
1361  }
1362 
1363  info.transformGroundTruth.getTranslationAndEulerAngles(x,y,z,roll,pitch,yaw);
1364  dist = info.transformGroundTruth.getNorm();
1365  stats.insert(std::make_pair("Odometry/TG/m", dist));
1366  stats.insert(std::make_pair("Odometry/TGx/m", x));
1367  stats.insert(std::make_pair("Odometry/TGy/m", y));
1368  stats.insert(std::make_pair("Odometry/TGz/m", z));
1369  stats.insert(std::make_pair("Odometry/TGroll/deg", roll*180.0/CV_PI));
1370  stats.insert(std::make_pair("Odometry/TGpitch/deg", pitch*180.0/CV_PI));
1371  stats.insert(std::make_pair("Odometry/TGyaw/deg", yaw*180.0/CV_PI));
1372 
1373  if(info.interval>0.0)
1374  {
1375  speed = dist/info.interval;
1376  stats.insert(std::make_pair("Odometry/SpeedG/kph", speed*3.6));
1377  stats.insert(std::make_pair("Odometry/SpeedG/mph", speed*2.237));
1378  stats.insert(std::make_pair("Odometry/SpeedG/mps", speed));
1379  }
1380  }
1381  return stats;
1382 }
1383 
1384 rtabmap::OdometryInfo odomInfoFromROS(const rtabmap_ros::OdomInfo & msg)
1385 {
1386  rtabmap::OdometryInfo info;
1387  info.lost = msg.lost;
1388  info.reg.matches = msg.matches;
1389  info.reg.inliers = msg.inliers;
1390  info.reg.icpInliersRatio = msg.icpInliersRatio;
1391  info.reg.icpRotation = msg.icpRotation;
1392  info.reg.icpTranslation = msg.icpTranslation;
1393  info.reg.icpStructuralComplexity = msg.icpStructuralComplexity;
1394  info.reg.icpStructuralDistribution = msg.icpStructuralDistribution;
1395  info.reg.icpCorrespondences = msg.icpCorrespondences;
1396  info.reg.covariance = cv::Mat(6,6,CV_64FC1, (void*)msg.covariance.data()).clone();
1397  info.features = msg.features;
1398  info.localMapSize = msg.localMapSize;
1399  info.localScanMapSize = msg.localScanMapSize;
1400  info.localKeyFrames = msg.localKeyFrames;
1401  info.localBundleOutliers = msg.localBundleOutliers;
1402  info.localBundleConstraints = msg.localBundleConstraints;
1403  info.localBundleTime = msg.localBundleTime;
1404  info.keyFrameAdded = msg.keyFrameAdded;
1405  info.timeEstimation = msg.timeEstimation;
1406  info.timeParticleFiltering = msg.timeParticleFiltering;
1407  info.stamp = msg.stamp;
1408  info.interval = msg.interval;
1409  info.distanceTravelled = msg.distanceTravelled;
1410  info.memoryUsage = msg.memoryUsage;
1411  info.gravityRollError = msg.gravityRollError;
1412  info.gravityPitchError = msg.gravityPitchError;
1413 
1414  info.type = msg.type;
1415 
1416  UASSERT(msg.wordsKeys.size() == msg.wordsValues.size());
1417  for(unsigned int i=0; i<msg.wordsKeys.size(); ++i)
1418  {
1419  info.words.insert(std::make_pair(msg.wordsKeys[i], keypointFromROS(msg.wordsValues[i])));
1420  }
1421 
1422  info.reg.matchesIDs = msg.wordMatches;
1423  info.reg.inliersIDs = msg.wordInliers;
1424 
1425  info.refCorners = points2fFromROS(msg.refCorners);
1426  info.newCorners = points2fFromROS(msg.newCorners);
1427  info.cornerInliers = msg.cornerInliers;
1428 
1429  info.transform = transformFromGeometryMsg(msg.transform);
1430  info.transformFiltered = transformFromGeometryMsg(msg.transformFiltered);
1431  info.transformGroundTruth = transformFromGeometryMsg(msg.transformGroundTruth);
1432  info.guessVelocity = transformFromGeometryMsg(msg.guessVelocity);
1433 
1434  UASSERT(msg.localMapKeys.size() == msg.localMapValues.size());
1435  for(unsigned int i=0; i<msg.localMapKeys.size(); ++i)
1436  {
1437  info.localMap.insert(std::make_pair(msg.localMapKeys[i], point3fFromROS(msg.localMapValues[i])));
1438  }
1439 
1440  info.localScanMap = rtabmap::LaserScan(rtabmap::uncompressData(msg.localScanMap), 0, 0, (rtabmap::LaserScan::Format)msg.localScanMapFormat);
1441  return info;
1442 }
1443 
1444 void odomInfoToROS(const rtabmap::OdometryInfo & info, rtabmap_ros::OdomInfo & msg)
1445 {
1446  msg.lost = info.lost;
1447  msg.matches = info.reg.matches;
1448  msg.inliers = info.reg.inliers;
1449  msg.icpInliersRatio = info.reg.icpInliersRatio;
1450  msg.icpRotation = info.reg.icpRotation;
1451  msg.icpTranslation = info.reg.icpTranslation;
1452  msg.icpStructuralComplexity = info.reg.icpStructuralComplexity;
1453  msg.icpStructuralDistribution = info.reg.icpStructuralDistribution;
1454  msg.icpCorrespondences = info.reg.icpCorrespondences;
1455  if(info.reg.covariance.type() == CV_64FC1 && info.reg.covariance.cols == 6 && info.reg.covariance.rows == 6)
1456  {
1457  memcpy(msg.covariance.data(), info.reg.covariance.data, 36*sizeof(double));
1458  }
1459  msg.features = info.features;
1460  msg.localMapSize = info.localMapSize;
1461  msg.localScanMapSize = info.localScanMapSize;
1462  msg.localKeyFrames = info.localKeyFrames;
1463  msg.localBundleOutliers = info.localBundleOutliers;
1464  msg.localBundleConstraints = info.localBundleConstraints;
1465  msg.localBundleTime = info.localBundleTime;
1466  msg.keyFrameAdded = info.keyFrameAdded;
1467  msg.timeEstimation = info.timeEstimation;
1468  msg.timeParticleFiltering = info.timeParticleFiltering;
1469  msg.stamp = info.stamp;
1470  msg.interval = info.interval;
1471  msg.distanceTravelled = info.distanceTravelled;
1472  msg.memoryUsage = info.memoryUsage;
1473  msg.gravityRollError = info.gravityRollError;
1474  msg.gravityPitchError = info.gravityPitchError;
1475 
1476  msg.type = info.type;
1477 
1478  msg.wordsKeys = uKeys(info.words);
1479  keypointsToROS(uValues(info.words), msg.wordsValues);
1480 
1481  msg.wordMatches = info.reg.matchesIDs;
1482  msg.wordInliers = info.reg.inliersIDs;
1483 
1484  points2fToROS(info.refCorners, msg.refCorners);
1485  points2fToROS(info.newCorners, msg.newCorners);
1486  msg.cornerInliers = info.cornerInliers;
1487 
1488  transformToGeometryMsg(info.transform, msg.transform);
1489  transformToGeometryMsg(info.transformFiltered, msg.transformFiltered);
1490  transformToGeometryMsg(info.transformGroundTruth, msg.transformGroundTruth);
1491  transformToGeometryMsg(info.guessVelocity, msg.guessVelocity);
1492 
1493  msg.localMapKeys = uKeys(info.localMap);
1494  points3fToROS(uValues(info.localMap), msg.localMapValues);
1495 
1496  msg.localScanMap = rtabmap::compressData(info.localScanMap.data());
1497  msg.localScanMapFormat = info.localScanMap.format();
1498 }
1499 
1500 cv::Mat userDataFromROS(const rtabmap_ros::UserData & dataMsg)
1501 {
1502  cv::Mat data;
1503  if(!dataMsg.data.empty())
1504  {
1505  if(dataMsg.cols > 0 && dataMsg.rows > 0 && dataMsg.type >= 0)
1506  {
1507  data = cv::Mat(dataMsg.rows, dataMsg.cols, dataMsg.type, (void*)dataMsg.data.data()).clone();
1508  }
1509  else
1510  {
1511  if(dataMsg.cols != (int)dataMsg.data.size() || dataMsg.rows != 1 || dataMsg.type != CV_8UC1)
1512  {
1513  ROS_ERROR("cols, rows and type fields of the UserData msg "
1514  "are not correctly set (cols=%d, rows=%d, type=%d)! We assume that the data "
1515  "is compressed (cols=%d, rows=1, type=%d(CV_8UC1)).",
1516  dataMsg.cols, dataMsg.rows, dataMsg.type, (int)dataMsg.data.size(), CV_8UC1);
1517 
1518  }
1519  data = cv::Mat(1, dataMsg.data.size(), CV_8UC1, (void*)dataMsg.data.data()).clone();
1520  }
1521  }
1522  return data;
1523 }
1524 void userDataToROS(const cv::Mat & data, rtabmap_ros::UserData & dataMsg, bool compress)
1525 {
1526  if(!data.empty())
1527  {
1528  if(compress)
1529  {
1530  dataMsg.data = rtabmap::compressData(data);
1531  dataMsg.rows = 1;
1532  dataMsg.cols = dataMsg.data.size();
1533  dataMsg.type = CV_8UC1;
1534  }
1535  else
1536  {
1537  dataMsg.data.resize(data.step[0] * data.rows); // use step for non-contiguous matrices
1538  memcpy(dataMsg.data.data(), data.data, dataMsg.data.size());
1539  dataMsg.rows = data.rows;
1540  dataMsg.cols = data.cols;
1541  dataMsg.type = data.type();
1542  }
1543  }
1544 }
1545 
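// --- Illustrative sketch (not part of the original file) ---
// Packing an arbitrary cv::Mat into a rtabmap_ros::UserData message in
// compressed form and reading it back with the helpers above.
//
//   cv::Mat myData = (cv::Mat_<double>(1,3) << 1.0, 2.0, 3.0);
//   rtabmap_ros::UserData dataMsg;
//   rtabmap_ros::userDataToROS(myData, dataMsg, true);      // compress=true
//   cv::Mat raw = rtabmap_ros::userDataFromROS(dataMsg);    // 1xN CV_8UC1 compressed bytes
//   cv::Mat restored = rtabmap::uncompressData(raw);        // original 1x3 CV_64FC1 matrix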
1546 rtabmap::Landmarks landmarksFromROS(
1547  const std::map<int, geometry_msgs::PoseWithCovarianceStamped> & tags,
1548  const std::string & frameId,
1549  const std::string & odomFrameId,
1550  const ros::Time & odomStamp,
1551  tf::TransformListener & listener,
1552  double waitForTransform,
1553  double defaultLinVariance,
1554  double defaultAngVariance)
1555 {
1556  //tag detections
1557  rtabmap::Landmarks landmarks;
1558  for(std::map<int, geometry_msgs::PoseWithCovarianceStamped>::const_iterator iter=tags.begin(); iter!=tags.end(); ++iter)
1559  {
1560  if(iter->first <=0)
1561  {
1562  ROS_ERROR("Invalid landmark received! IDs should be > 0 (it is %d). Ignoring this landmark.", iter->first);
1563  continue;
1564  }
1565  rtabmap::Transform baseToCamera = getTransform(
1566  frameId,
1567  iter->second.header.frame_id,
1568  iter->second.header.stamp,
1569  listener,
1570  waitForTransform);
1571 
1572  if(baseToCamera.isNull())
1573  {
1574  ROS_ERROR("Cannot transform tag pose from \"%s\" frame to \"%s\" frame!",
1575  iter->second.header.frame_id.c_str(), frameId.c_str());
1576  continue;
1577  }
1578 
1579  rtabmap::Transform baseToTag = baseToCamera * transformFromPoseMsg(iter->second.pose.pose);
1580 
1581  if(!baseToTag.isNull())
1582  {
1583  // Correction of the global pose accounting the odometry movement since we received it
1584  rtabmap::Transform correction = getMovingTransform(
1585  frameId,
1586  odomFrameId,
1587  iter->second.header.stamp,
1588  odomStamp,
1589  listener,
1590  waitForTransform);
1591  if(!correction.isNull())
1592  {
1593  baseToTag = correction * baseToTag;
1594  }
1595  else
1596  {
1597  ROS_WARN("Could not adjust the tag pose according to the latest odometry pose. "
1598  "If the odometry movement since the tag pose was received is small and the "
1599  "covariance is large, this should not be a problem.");
1600  }
1601  cv::Mat covariance = cv::Mat(6,6, CV_64FC1, (void*)iter->second.pose.covariance.data()).clone();
1602  if(covariance.empty() || !uIsFinite(covariance.at<double>(0,0)) || covariance.at<double>(0,0)<=0.0f)
1603  {
1604  covariance = cv::Mat::eye(6,6,CV_64FC1);
1605  covariance(cv::Range(0,3), cv::Range(0,3)) *= defaultLinVariance;
1606  covariance(cv::Range(3,6), cv::Range(3,6)) *= defaultAngVariance;
1607  }
1608  landmarks.insert(std::make_pair(iter->first, rtabmap::Landmark(iter->first, baseToTag, covariance)));
1609  }
1610  }
1611  return landmarks;
1612 }
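// Usage sketch: converting tag detections (e.g., from an AprilTag node) into rtabmap
// landmarks; `tags`, `listener` and `odomStamp` are hypothetical names, and the wait
// time (0.1 s) and default variances (0.001) are arbitrary example values.
//
//   std::map<int, geometry_msgs::PoseWithCovarianceStamped> tags;  // id -> detected pose
//   rtabmap::Landmarks landmarks = rtabmap_ros::landmarksFromROS(
//       tags, "base_link", "odom", odomStamp, listener, 0.1, 0.001, 0.001);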
1613 
1614 rtabmap::Transform getTransform(
1615  const std::string & fromFrameId,
1616  const std::string & toFrameId,
1617  const ros::Time & stamp,
1618  tf::TransformListener & listener,
1619  double waitForTransform)
1620 {
1621  // TF ready?
1622  rtabmap::Transform transform;
1623  try
1624  {
1625  if(waitForTransform > 0.0 && !stamp.isZero())
1626  {
1627  //if(!tfBuffer_.canTransform(fromFrameId, toFrameId, stamp, ros::Duration(1)))
1628  std::string errorMsg;
1629  if(!listener.waitForTransform(fromFrameId, toFrameId, stamp, ros::Duration(waitForTransform), ros::Duration(0.01), &errorMsg))
1630  {
1631  ROS_WARN("Could not get transform from %s to %s after %f seconds (for stamp=%f)! Error=\"%s\".",
1632  fromFrameId.c_str(), toFrameId.c_str(), waitForTransform, stamp.toSec(), errorMsg.c_str());
1633  return transform;
1634  }
1635  }
1636 
1637  tf::StampedTransform tmp;
1638  listener.lookupTransform(fromFrameId, toFrameId, stamp, tmp);
1639  transform = rtabmap_ros::transformFromTF(tmp);
1640  }
1641  catch(tf::TransformException & ex)
1642  {
1643  ROS_WARN("(getting transform %s -> %s) %s", fromFrameId.c_str(), toFrameId.c_str(), ex.what());
1644  }
1645  return transform;
1646 }
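// Usage sketch: looking up the transform from the robot base to a sensor frame at a
// message stamp, waiting up to 0.2 s for TF; frame names, `msgStamp` and `listener`
// are hypothetical.
//
//   rtabmap::Transform localTransform =
//       rtabmap_ros::getTransform("base_link", "camera_link", msgStamp, listener, 0.2);
//   if(localTransform.isNull())
//   {
//       // TF not available yet, skip or drop this message
//   }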
1647 
1648 // Get the displacement of a moving frame between two stamps according to a fixed frame.
1649 // For example, get the transform of the moving /base_link between two stamps according to the /odom frame.
1650 rtabmap::Transform getTransform(
1651  const std::string & sourceTargetFrame,
1652  const std::string & fixedFrame,
1653  const ros::Time & stampSource,
1654  const ros::Time & stampTarget,
1655  tf::TransformListener & listener,
1656  double waitForTransform)
1657 {
1658  // TF ready?
1659  rtabmap::Transform transform;
1660  try
1661  {
1662  ros::Time stamp = stampSource>stampTarget?stampSource:stampTarget;
1663  if(waitForTransform > 0.0 && !stamp.isZero())
1664  {
1665  std::string errorMsg;
1666  if(!listener.waitForTransform(sourceTargetFrame, fixedFrame, stamp, ros::Duration(waitForTransform), ros::Duration(0.01), &errorMsg))
1667  {
1668  ROS_WARN("Could not get transform from %s to %s according to %s after %f seconds (for stamps=%f -> %f)! Error=\"%s\".",
1669  sourceTargetFrame.c_str(), sourceTargetFrame.c_str(), fixedFrame.c_str(), waitForTransform, stampSource.toSec(), stampTarget.toSec(), errorMsg.c_str());
1670  return transform;
1671  }
1672  }
1673 
1674  tf::StampedTransform tmp;
1675  listener.lookupTransform(sourceTargetFrame, stampTarget, sourceTargetFrame, stampSource, fixedFrame, tmp);
1676  transform = rtabmap_ros::transformFromTF(tmp);
1677  }
1678  catch(tf::TransformException & ex)
1679  {
1680  ROS_WARN("(getting transform movement of %s according to fixed %s) %s", sourceTargetFrame.c_str(), fixedFrame.c_str(), ex.what());
1681  }
1682  return transform;
1683 }
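// Usage sketch: estimating how much the base frame moved between an image stamp and
// the latest odometry stamp, using the odometry frame as the fixed frame; this is the
// same correction pattern used in the conversion functions below. Names are hypothetical.
//
//   rtabmap::Transform correction = rtabmap_ros::getTransform(
//       "base_link", "odom", odomStamp, imageStamp, listener, 0.1);
//   if(!correction.isNull())
//   {
//       localTransform = correction * localTransform;  // as done in convertRGBDMsgs()
//   }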
1684 
1685 bool convertRGBDMsgs(
1686  const std::vector<cv_bridge::CvImageConstPtr> & imageMsgs,
1687  const std::vector<cv_bridge::CvImageConstPtr> & depthMsgs,
1688  const std::vector<sensor_msgs::CameraInfo> & cameraInfoMsgs,
1689  const std::string & frameId,
1690  const std::string & odomFrameId,
1691  const ros::Time & odomStamp,
1692  cv::Mat & rgb,
1693  cv::Mat & depth,
1694  std::vector<rtabmap::CameraModel> & cameraModels,
1695  tf::TransformListener & listener,
1696  double waitForTransform,
1697  const std::vector<std::vector<rtabmap_ros::KeyPoint> > & localKeyPointsMsgs,
1698  const std::vector<std::vector<rtabmap_ros::Point3f> > & localPoints3dMsgs,
1699  const std::vector<cv::Mat> & localDescriptorsMsgs,
1700  std::vector<cv::KeyPoint> * localKeyPoints,
1701  std::vector<cv::Point3f> * localPoints3d,
1702  cv::Mat * localDescriptors)
1703 {
1704  UASSERT(imageMsgs.size()>0 &&
1705  (imageMsgs.size() == depthMsgs.size() || depthMsgs.empty()) &&
1706  imageMsgs.size() == cameraInfoMsgs.size());
1707 
1708  int imageWidth = imageMsgs[0]->image.cols;
1709  int imageHeight = imageMsgs[0]->image.rows;
1710  int depthWidth = depthMsgs.size()?depthMsgs[0]->image.cols:0;
1711  int depthHeight = depthMsgs.size()?depthMsgs[0]->image.rows:0;
1712 
1713  if(depthMsgs.size())
1714  {
1715  UASSERT_MSG(
1716  imageWidth % depthWidth == 0 && imageHeight % depthHeight == 0 &&
1717  imageWidth/depthWidth == imageHeight/depthHeight,
1718  uFormat("rgb=%dx%d depth=%dx%d", imageWidth, imageHeight, depthWidth, depthHeight).c_str());
1719  }
1720 
1721  int cameraCount = imageMsgs.size();
1722  for(unsigned int i=0; i<imageMsgs.size(); ++i)
1723  {
1724  if(!(imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::TYPE_8UC1) == 0 ||
1725  imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::MONO8) ==0 ||
1726  imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::MONO16) ==0 ||
1727  imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::BGR8) == 0 ||
1728  imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::RGB8) == 0 ||
1729  imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::BGRA8) == 0 ||
1730  imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::RGBA8) == 0 ||
1731  imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::BAYER_GRBG8) == 0 ||
1732  imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::BAYER_RGGB8) == 0))
1733  {
1734 
1735  ROS_ERROR("Input rgb type must be image=mono8,mono16,rgb8,bgr8,bgra8,rgba8,8UC1,bayer_grbg8,bayer_rggb8. Current rgb=%s",
1736  imageMsgs[i]->encoding.c_str());
1737  return false;
1738  }
1739  if(depthMsgs.size() &&
1740  !(depthMsgs[i]->encoding.compare(sensor_msgs::image_encodings::TYPE_16UC1) == 0 ||
1741  depthMsgs[i]->encoding.compare(sensor_msgs::image_encodings::TYPE_32FC1) == 0 ||
1742  depthMsgs[i]->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0))
1743  {
1744  ROS_ERROR("Input depth type must be image_depth=32FC1,16UC1,mono16. Current depth=%s",
1745  depthMsgs[i]->encoding.c_str());
1746  return false;
1747  }
1748 
1749  UASSERT_MSG(imageMsgs[i]->image.cols == imageWidth && imageMsgs[i]->image.rows == imageHeight,
1750  uFormat("imageWidth=%d vs %d imageHeight=%d vs %d",
1751  imageWidth,
1752  imageMsgs[i]->image.cols,
1753  imageHeight,
1754  imageMsgs[i]->image.rows).c_str());
1755  ros::Time stamp;
1756  if(depthMsgs.size())
1757  {
1758  UASSERT_MSG(depthMsgs[i]->image.cols == depthWidth && depthMsgs[i]->image.rows == depthHeight,
1759  uFormat("depthWidth=%d vs %d depthHeight=%d vs %d",
1760  depthWidth,
1761  depthMsgs[i]->image.cols,
1762  depthHeight,
1763  depthMsgs[i]->image.rows).c_str());
1764  stamp = depthMsgs[i]->header.stamp;
1765  }
1766  else
1767  {
1768  stamp = imageMsgs[i]->header.stamp;
1769  }
1770 
1771  // use the depth image's stamp so that geometry is synchronized with odometry; use the rgb frame as we assume depth is registered (normally the depth msg has the same frame as rgb)
1772  rtabmap::Transform localTransform = rtabmap_ros::getTransform(frameId, imageMsgs[i]->header.frame_id, stamp, listener, waitForTransform);
1773  if(localTransform.isNull())
1774  {
1775  ROS_ERROR("TF of received image %u at time %fs is not set!", i, stamp.toSec());
1776  return false;
1777  }
1778  // sync with odometry stamp
1779  if(!odomFrameId.empty() && odomStamp != stamp)
1780  {
1781  rtabmap::Transform sensorT = getTransform(
1782  frameId,
1783  odomFrameId,
1784  odomStamp,
1785  stamp,
1786  listener,
1787  waitForTransform);
1788  if(sensorT.isNull())
1789  {
1790  ROS_WARN("Could not get odometry value for depth image stamp (%fs). Latest odometry "
1791  "stamp is %fs. The depth image pose will not be synchronized with odometry.", stamp.toSec(), odomStamp.toSec());
1792  }
1793  else
1794  {
1795  //ROS_WARN("RGBD correction = %s (time diff=%fs)", sensorT.prettyPrint().c_str(), fabs(stamp.toSec()-odomStamp.toSec()));
1796  localTransform = sensorT * localTransform;
1797  }
1798  }
1799 
1800  cv_bridge::CvImageConstPtr ptrImage = imageMsgs[i];
1801  if(imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::TYPE_8UC1)==0 ||
1802  imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0 ||
1803  imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::BGR8) == 0)
1804  {
1805  // do nothing
1806  }
1807  else if(imageMsgs[i]->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0)
1808  {
1809  ptrImage = cv_bridge::cvtColor(imageMsgs[i], "mono8");
1810  }
1811  else
1812  {
1813  ptrImage = cv_bridge::cvtColor(imageMsgs[i], "bgr8");
1814  }
1815 
1816  // initialize
1817  if(rgb.empty())
1818  {
1819  rgb = cv::Mat(imageHeight, imageWidth*cameraCount, ptrImage->image.type());
1820  }
1821  if(ptrImage->image.type() == rgb.type())
1822  {
1823  ptrImage->image.copyTo(cv::Mat(rgb, cv::Rect(i*imageWidth, 0, imageWidth, imageHeight)));
1824  }
1825  else
1826  {
1827  ROS_ERROR("Some RGB images are not the same type!");
1828  return false;
1829  }
1830 
1831  if(depthMsgs.size())
1832  {
1833  cv_bridge::CvImageConstPtr ptrDepth = depthMsgs[i];
1834  cv::Mat subDepth = ptrDepth->image;
1835 
1836  if(depth.empty())
1837  {
1838  depth = cv::Mat(depthHeight, depthWidth*cameraCount, subDepth.type());
1839  }
1840 
1841  if(subDepth.type() == depth.type())
1842  {
1843  subDepth.copyTo(cv::Mat(depth, cv::Rect(i*depthWidth, 0, depthWidth, depthHeight)));
1844  }
1845  else
1846  {
1847  ROS_ERROR("Some Depth images are not the same type!");
1848  return false;
1849  }
1850  }
1851 
1852  cameraModels.push_back(rtabmap_ros::cameraModelFromROS(cameraInfoMsgs[i], localTransform));
1853 
1854  if(localKeyPoints && localKeyPointsMsgs.size() == imageMsgs.size())
1855  {
1856  rtabmap_ros::keypointsFromROS(localKeyPointsMsgs[i], *localKeyPoints, imageWidth*i);
1857  }
1858  if(localPoints3d && localPoints3dMsgs.size() == imageMsgs.size())
1859  {
1860  // Points should be in base frame
1861  rtabmap_ros::points3fFromROS(localPoints3dMsgs[i], *localPoints3d, localTransform);
1862  }
1863  if(localDescriptors && localDescriptorsMsgs.size() == imageMsgs.size())
1864  {
1865  localDescriptors->push_back(localDescriptorsMsgs[i]);
1866  }
1867  }
1868  return true;
1869 }
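// Usage sketch: calling convertRGBDMsgs() from a synchronized single-camera RGB-D
// callback; the message pointers, camera info and `listener` are hypothetical, and the
// trailing local-feature arguments are left to their defaults.
//
//   std::vector<cv_bridge::CvImageConstPtr> rgbPtrs(1, cv_bridge::toCvShare(rgbMsg));
//   std::vector<cv_bridge::CvImageConstPtr> depthPtrs(1, cv_bridge::toCvShare(depthMsg));
//   std::vector<sensor_msgs::CameraInfo> infos(1, *cameraInfoMsg);
//   cv::Mat rgb, depth;
//   std::vector<rtabmap::CameraModel> models;
//   bool ok = rtabmap_ros::convertRGBDMsgs(
//       rgbPtrs, depthPtrs, infos, "base_link", "odom", odomStamp,
//       rgb, depth, models, listener, 0.1);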
1870 
1871 bool convertStereoMsg(
1872  const cv_bridge::CvImageConstPtr& leftImageMsg,
1873  const cv_bridge::CvImageConstPtr& rightImageMsg,
1874  const sensor_msgs::CameraInfo& leftCamInfoMsg,
1875  const sensor_msgs::CameraInfo& rightCamInfoMsg,
1876  const std::string & frameId,
1877  const std::string & odomFrameId,
1878  const ros::Time & odomStamp,
1879  cv::Mat & left,
1880  cv::Mat & right,
1881  rtabmap::StereoCameraModel & stereoModel,
1882  tf::TransformListener & listener,
1883  double waitForTransform,
1884  bool alreadyRectified)
1885 {
1886  UASSERT(leftImageMsg.get() && rightImageMsg.get());
1887 
1888  if(!(leftImageMsg->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0 ||
1889  leftImageMsg->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0 ||
1890  leftImageMsg->encoding.compare(sensor_msgs::image_encodings::BGR8) == 0 ||
1891  leftImageMsg->encoding.compare(sensor_msgs::image_encodings::RGB8) == 0 ||
1892  leftImageMsg->encoding.compare(sensor_msgs::image_encodings::BGRA8) == 0 ||
1893  leftImageMsg->encoding.compare(sensor_msgs::image_encodings::RGBA8) == 0) ||
1894  !(rightImageMsg->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0 ||
1895  rightImageMsg->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0 ||
1896  rightImageMsg->encoding.compare(sensor_msgs::image_encodings::BGR8) == 0 ||
1897  rightImageMsg->encoding.compare(sensor_msgs::image_encodings::RGB8) == 0 ||
1898  rightImageMsg->encoding.compare(sensor_msgs::image_encodings::BGRA8) == 0 ||
1899  rightImageMsg->encoding.compare(sensor_msgs::image_encodings::RGBA8) == 0))
1900  {
1902  ROS_ERROR("Input type must be image=mono8,mono16,rgb8,bgr8,bgra8,rgba8. Current left=%s and right=%s",
1903  leftImageMsg->encoding.c_str(),
1904  rightImageMsg->encoding.c_str());
1905  return false;
1906  }
1907 
1908  if(leftImageMsg->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0 ||
1909  leftImageMsg->encoding.compare(sensor_msgs::image_encodings::MONO16) == 0)
1910  {
1911  left = cv_bridge::cvtColor(leftImageMsg, "mono8")->image;
1912  }
1913  else
1914  {
1915  left = cv_bridge::cvtColor(leftImageMsg, "bgr8")->image;
1916  }
1917  right = cv_bridge::cvtColor(rightImageMsg, "mono8")->image;
1918 
1919  rtabmap::Transform localTransform = getTransform(frameId, leftImageMsg->header.frame_id, leftImageMsg->header.stamp, listener, waitForTransform);
1920  if(localTransform.isNull())
1921  {
1922  return false;
1923  }
1924  // sync with odometry stamp
1925  if(!odomFrameId.empty() && odomStamp != leftImageMsg->header.stamp)
1926  {
1927  rtabmap::Transform sensorT = getTransform(
1928  frameId,
1929  odomFrameId,
1930  odomStamp,
1931  leftImageMsg->header.stamp,
1932  listener,
1933  waitForTransform);
1934  if(sensorT.isNull())
1935  {
1936  ROS_WARN("Could not get odometry value for stereo msg stamp (%fs). Latest odometry "
1937  "stamp is %fs. The stereo image pose will not be synchronized with odometry.", leftImageMsg->header.stamp.toSec(), odomStamp.toSec());
1938  }
1939  else
1940  {
1941  localTransform = sensorT * localTransform;
1942  }
1943  }
1944 
1945  stereoModel = rtabmap_ros::stereoCameraModelFromROS(leftCamInfoMsg, rightCamInfoMsg, localTransform);
1946 
1947  if(stereoModel.baseline() > 10.0)
1948  {
1949  static bool shown = false;
1950  if(!shown)
1951  {
1952  ROS_WARN("Detected baseline (%f m) is quite large! Is your "
1953  "right camera_info P(0,3) correctly set? Note that "
1954  "baseline=-P(0,3)/P(0,0). You may need to calibrate your camera. "
1955  "This warning is printed only once.",
1956  stereoModel.baseline());
1957  shown = true;
1958  }
1959  }
1960  else if(stereoModel.baseline() == 0 && alreadyRectified)
1961  {
1962  rtabmap::Transform stereoTransform = getTransform(
1963  leftCamInfoMsg.header.frame_id,
1964  rightCamInfoMsg.header.frame_id,
1965  leftCamInfoMsg.header.stamp,
1966  listener,
1967  waitForTransform);
1968  if(stereoTransform.isNull() || stereoTransform.x()<=0)
1969  {
1970  ROS_WARN("We cannot estimate the baseline of the rectified images with tf! (%s->%s = %s)",
1971  rightCamInfoMsg.header.frame_id.c_str(), leftCamInfoMsg.header.frame_id.c_str(), stereoTransform.prettyPrint().c_str());
1972  }
1973  else
1974  {
1975  static bool warned = false;
1976  if(!warned)
1977  {
1978  ROS_WARN("Right camera info doesn't have Tx set but we are assuming that stereo images are already rectified (see %s parameter). While not "
1979  "recommended, we used TF to get the baseline (%s->%s = %fm) for convenience (e.g., D400 ir stereo issue). It is preferred to feed "
1980  "a valid right camera info if stereo images are already rectified. This message is only printed once...",
1981  rtabmap::Parameters::kRtabmapImagesAlreadyRectified().c_str(),
1982  rightCamInfoMsg.header.frame_id.c_str(), leftCamInfoMsg.header.frame_id.c_str(), stereoTransform.x());
1983  warned = true;
1984  }
1985  stereoModel = rtabmap::StereoCameraModel(
1986  stereoModel.left().fx(),
1987  stereoModel.left().fy(),
1988  stereoModel.left().cx(),
1989  stereoModel.left().cy(),
1990  stereoTransform.x(),
1991  stereoModel.localTransform(),
1992  stereoModel.left().imageSize());
1993  }
1994  }
1995  return true;
1996 }
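// Usage sketch: converting a synchronized stereo pair into left/right cv::Mat images
// and a rtabmap::StereoCameraModel; message and listener names are hypothetical.
//
//   cv::Mat left, right;
//   rtabmap::StereoCameraModel stereoModel;
//   bool ok = rtabmap_ros::convertStereoMsg(
//       cv_bridge::toCvShare(leftMsg), cv_bridge::toCvShare(rightMsg),
//       *leftInfoMsg, *rightInfoMsg,
//       "base_link", "odom", odomStamp,
//       left, right, stereoModel, listener, 0.1,
//       true /*alreadyRectified*/);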
1997 
1998 bool convertScanMsg(
1999  const sensor_msgs::LaserScan & scan2dMsg,
2000  const std::string & frameId,
2001  const std::string & odomFrameId,
2002  const ros::Time & odomStamp,
2003  rtabmap::LaserScan & scan,
2004  tf::TransformListener & listener,
2005  double waitForTransform,
2006  bool outputInFrameId)
2007 {
2008  // make sure TF for the laser frame is available up to the end of the scan (stamp + ranges*time_increment)
2009  rtabmap::Transform tmpT = getTransform(
2010  odomFrameId.empty()?frameId:odomFrameId,
2011  scan2dMsg.header.frame_id,
2012  scan2dMsg.header.stamp + ros::Duration().fromSec(scan2dMsg.ranges.size()*scan2dMsg.time_increment),
2013  listener,
2014  waitForTransform);
2015  if(tmpT.isNull())
2016  {
2017  return false;
2018  }
2019 
2020  rtabmap::Transform scanLocalTransform = getTransform(
2021  frameId,
2022  scan2dMsg.header.frame_id,
2023  scan2dMsg.header.stamp,
2024  listener,
2025  waitForTransform);
2026  if(scanLocalTransform.isNull())
2027  {
2028  return false;
2029  }
2030 
2031  //transform in frameId_ frame
2032  sensor_msgs::PointCloud2 scanOut;
2033  laser_geometry::LaserProjection projection;
2034  projection.transformLaserScanToPointCloud(odomFrameId.empty()?frameId:odomFrameId, scan2dMsg, scanOut, listener);
2035 
2036  //transform back in laser frame
2037  rtabmap::Transform laserToOdom = getTransform(
2038  scan2dMsg.header.frame_id,
2039  odomFrameId.empty()?frameId:odomFrameId,
2040  scan2dMsg.header.stamp,
2041  listener,
2042  waitForTransform);
2043  if(laserToOdom.isNull())
2044  {
2045  return false;
2046  }
2047 
2048  // sync with odometry stamp
2049  if(!odomFrameId.empty() && odomStamp != scan2dMsg.header.stamp)
2050  {
2051  rtabmap::Transform sensorT = getTransform(
2052  frameId,
2053  odomFrameId,
2054  odomStamp,
2055  scan2dMsg.header.stamp,
2056  listener,
2057  waitForTransform);
2058  if(sensorT.isNull())
2059  {
2060  ROS_WARN("Could not get odometry value for laser scan stamp (%fs). Latest odometry "
2061  "stamp is %fs. The laser scan pose will not be synchronized with odometry.", scan2dMsg.header.stamp.toSec(), odomStamp.toSec());
2062  }
2063  else
2064  {
2065  //ROS_WARN("scan correction = %s (time diff=%fs)", sensorT.prettyPrint().c_str(), fabs(scan2dMsg->header.stamp.toSec()-odomStamp.toSec()));
2066  scanLocalTransform = sensorT * scanLocalTransform;
2067  }
2068  }
2069 
2070  if(outputInFrameId)
2071  {
2072  laserToOdom *= scanLocalTransform;
2073  }
2074 
2075  bool hasIntensity = false;
2076  for(unsigned int i=0; i<scanOut.fields.size(); ++i)
2077  {
2078  if(scanOut.fields[i].name.compare("intensity") == 0)
2079  {
2080  if(scanOut.fields[i].datatype == sensor_msgs::PointField::FLOAT32)
2081  {
2082  hasIntensity = true;
2083  }
2084  else
2085  {
2086  static bool warningShown = false;
2087  if(!warningShown)
2088  {
2089  ROS_WARN("The input scan cloud has an \"intensity\" field "
2090  "but the datatype (%d) is not supported. Intensity will be ignored. "
2091  "This message is only shown once.", scanOut.fields[i].datatype);
2092  warningShown = true;
2093  }
2094  }
2095  }
2096  }
2097 
2098  rtabmap::LaserScan::Format format;
2099  cv::Mat data;
2100  if(hasIntensity)
2101  {
2102  pcl::PointCloud<pcl::PointXYZI>::Ptr pclScan(new pcl::PointCloud<pcl::PointXYZI>);
2103  pcl::fromROSMsg(scanOut, *pclScan);
2104  pclScan->is_dense = true;
2105  data = rtabmap::util3d::laserScan2dFromPointCloud(*pclScan, laserToOdom); // put back in laser frame
2106  format = rtabmap::LaserScan::kXYI;
2107  }
2108  else
2109  {
2110  pcl::PointCloud<pcl::PointXYZ>::Ptr pclScan(new pcl::PointCloud<pcl::PointXYZ>);
2111  pcl::fromROSMsg(scanOut, *pclScan);
2112  pclScan->is_dense = true;
2113  data = rtabmap::util3d::laserScan2dFromPointCloud(*pclScan, laserToOdom); // put back in laser frame
2114  format = rtabmap::LaserScan::kXY;
2115  }
2116 
2117  rtabmap::Transform zAxis(0,0,1,0,0,0);
2118  if((scanLocalTransform.rotation()*zAxis).z() < 0)
2119  {
2120  cv::Mat flipScan;
2121  cv::flip(data, flipScan, 1);
2122  data = flipScan;
2123  }
2124 
2125  scan = rtabmap::LaserScan(
2126  data,
2127  format,
2128  scan2dMsg.range_min,
2129  scan2dMsg.range_max,
2130  scan2dMsg.angle_min,
2131  scan2dMsg.angle_max,
2132  scan2dMsg.angle_increment,
2133  outputInFrameId?rtabmap::Transform::getIdentity():scanLocalTransform);
2134 
2135  return true;
2136 }
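// Usage sketch: converting a 2D LaserScan into a rtabmap::LaserScan kept in the laser
// frame (outputInFrameId=false) and synchronized with the latest odometry stamp;
// `scanMsg`, `odomStamp` and `listener` are hypothetical names.
//
//   rtabmap::LaserScan scan;
//   bool ok = rtabmap_ros::convertScanMsg(
//       *scanMsg, "base_link", "odom", odomStamp, scan, listener, 0.1, false);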
2137 
2138 bool convertScan3dMsg(
2139  const sensor_msgs::PointCloud2 & scan3dMsg,
2140  const std::string & frameId,
2141  const std::string & odomFrameId,
2142  const ros::Time & odomStamp,
2143  rtabmap::LaserScan & scan,
2144  tf::TransformListener & listener,
2145  double waitForTransform,
2146  int maxPoints,
2147  float maxRange)
2148 {
2149  bool hasNormals = false;
2150  bool hasColors = false;
2151  bool hasIntensity = false;
2152  for(unsigned int i=0; i<scan3dMsg.fields.size(); ++i)
2153  {
2154  if(scan3dMsg.fields[i].name.compare("normal_x") == 0)
2155  {
2156  hasNormals = true;
2157  }
2158  if(scan3dMsg.fields[i].name.compare("rgb") == 0 || scan3dMsg.fields[i].name.compare("rgba") == 0)
2159  {
2160  hasColors = true;
2161  }
2162  if(scan3dMsg.fields[i].name.compare("intensity") == 0)
2163  {
2164  if(scan3dMsg.fields[i].datatype == sensor_msgs::PointField::FLOAT32)
2165  {
2166  hasIntensity = true;
2167  }
2168  else
2169  {
2170  static bool warningShown = false;
2171  if(!warningShown)
2172  {
2173  ROS_WARN("The input scan cloud has an \"intensity\" field "
2174  "but the datatype (%d) is not supported. Intensity will be ignored. "
2175  "This message is only shown once.", scan3dMsg.fields[i].datatype);
2176  warningShown = true;
2177  }
2178  }
2179  }
2180  }
2181 
2182  rtabmap::Transform scanLocalTransform = getTransform(frameId, scan3dMsg.header.frame_id, scan3dMsg.header.stamp, listener, waitForTransform);
2183  if(scanLocalTransform.isNull())
2184  {
2185  ROS_ERROR("TF of received scan cloud at time %fs is not set, aborting rtabmap update.", scan3dMsg.header.stamp.toSec());
2186  return false;
2187  }
2188 
2189  // sync with odometry stamp
2190  if(!odomFrameId.empty() && odomStamp != scan3dMsg.header.stamp)
2191  {
2192  rtabmap::Transform sensorT = getTransform(
2193  frameId,
2194  odomFrameId,
2195  odomStamp,
2196  scan3dMsg.header.stamp,
2197  listener,
2198  waitForTransform);
2199  if(sensorT.isNull())
2200  {
2201  ROS_WARN("Could not get odometry value for laser scan stamp (%fs). Latest odometry "
2202  "stamp is %fs. The 3d laser scan pose will not be synchronized with odometry.", scan3dMsg.header.stamp.toSec(), odomStamp.toSec());
2203  }
2204  else
2205  {
2206  scanLocalTransform = sensorT * scanLocalTransform;
2207  }
2208  }
2209 
2210  if(hasNormals)
2211  {
2212  if(hasColors)
2213  {
2214  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pclScan(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
2215  pcl::fromROSMsg(scan3dMsg, *pclScan);
2216  if(!pclScan->is_dense)
2217  {
2218  pclScan = rtabmap::util3d::removeNaNNormalsFromPointCloud(pclScan);
2219  }
2220  scan = rtabmap::LaserScan(rtabmap::util3d::laserScanFromPointCloud(*pclScan), maxPoints, maxRange, rtabmap::LaserScan::kXYZRGBNormal, scanLocalTransform);
2221  }
2222  else if(hasIntensity)
2223  {
2224  pcl::PointCloud<pcl::PointXYZINormal>::Ptr pclScan(new pcl::PointCloud<pcl::PointXYZINormal>);
2225  pcl::fromROSMsg(scan3dMsg, *pclScan);
2226  if(!pclScan->is_dense)
2227  {
2228  pclScan = rtabmap::util3d::removeNaNNormalsFromPointCloud(pclScan);
2229  }
2230  scan = rtabmap::LaserScan(rtabmap::util3d::laserScanFromPointCloud(*pclScan), maxPoints, maxRange, rtabmap::LaserScan::kXYZINormal, scanLocalTransform);
2231  }
2232  else
2233  {
2234  pcl::PointCloud<pcl::PointNormal>::Ptr pclScan(new pcl::PointCloud<pcl::PointNormal>);
2235  pcl::fromROSMsg(scan3dMsg, *pclScan);
2236  if(!pclScan->is_dense)
2237  {
2238  pclScan = rtabmap::util3d::removeNaNNormalsFromPointCloud(pclScan);
2239  }
2240  scan = rtabmap::LaserScan(rtabmap::util3d::laserScanFromPointCloud(*pclScan), maxPoints, maxRange, rtabmap::LaserScan::kXYZNormal, scanLocalTransform);
2241  }
2242  }
2243  else
2244  {
2245  if(hasColors)
2246  {
2247  pcl::PointCloud<pcl::PointXYZRGB>::Ptr pclScan(new pcl::PointCloud<pcl::PointXYZRGB>);
2248  pcl::fromROSMsg(scan3dMsg, *pclScan);
2249  if(!pclScan->is_dense)
2250  {
2251  pclScan = rtabmap::util3d::removeNaNFromPointCloud(pclScan);
2252  }
2253  scan = rtabmap::LaserScan(rtabmap::util3d::laserScanFromPointCloud(*pclScan), maxPoints, maxRange, rtabmap::LaserScan::kXYZRGB, scanLocalTransform);
2254  }
2255  else if(hasIntensity)
2256  {
2257  pcl::PointCloud<pcl::PointXYZI>::Ptr pclScan(new pcl::PointCloud<pcl::PointXYZI>);
2258  pcl::fromROSMsg(scan3dMsg, *pclScan);
2259  if(!pclScan->is_dense)
2260  {
2261  pclScan = rtabmap::util3d::removeNaNFromPointCloud(pclScan);
2262  }
2263  scan = rtabmap::LaserScan(rtabmap::util3d::laserScanFromPointCloud(*pclScan), maxPoints, maxRange, rtabmap::LaserScan::kXYZI, scanLocalTransform);
2264  }
2265  else
2266  {
2267  pcl::PointCloud<pcl::PointXYZ>::Ptr pclScan(new pcl::PointCloud<pcl::PointXYZ>);
2268  pcl::fromROSMsg(scan3dMsg, *pclScan);
2269  if(!pclScan->is_dense)
2270  {
2271  pclScan = rtabmap::util3d::removeNaNFromPointCloud(pclScan);
2272  }
2273  scan = rtabmap::LaserScan(rtabmap::util3d::laserScanFromPointCloud(*pclScan), maxPoints, maxRange, rtabmap::LaserScan::kXYZ, scanLocalTransform);
2274  }
2275  }
2276  return true;
2277 }
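// Usage sketch: converting a 3D point cloud into a rtabmap::LaserScan, keeping all
// points (maxPoints=0) and the full range (maxRange=0); `cloudMsg`, `odomStamp` and
// `listener` are hypothetical names.
//
//   rtabmap::LaserScan scan;
//   bool ok = rtabmap_ros::convertScan3dMsg(
//       *cloudMsg, "base_link", "odom", odomStamp, scan, listener, 0.1,
//       0 /*maxPoints*/, 0.0f /*maxRange*/);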
2278 
2279 }