OdometryFovis.cpp
/*
Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.
    * Neither the name of the Universite de Sherbrooke nor the
      names of its contributors may be used to endorse or promote products
      derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include "rtabmap/core/odometry/OdometryFovis.h"
#include "rtabmap/core/OdometryInfo.h"
#include "rtabmap/core/util2d.h"
#include "rtabmap/utilite/ULogger.h"
#include "rtabmap/utilite/UTimer.h"
#include "rtabmap/utilite/UStl.h"
#include <opencv2/imgproc/types_c.h>

#ifdef RTABMAP_FOVIS
#include <libfovis/fovis.hpp>
#endif

namespace rtabmap {

OdometryFovis::OdometryFovis(const ParametersMap & parameters) :
    Odometry(parameters)
#ifdef RTABMAP_FOVIS
    ,
    fovis_(0),
    rect_(0),
    stereoCalib_(0),
    depthImage_(0),
    stereoDepth_(0),
    lost_(false)
#endif
{
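    // Keep only the FOVIS-specific ("OdomFovis*") parameters, plus the generic
    // visual odometry keyframe threshold, which is mapped to fovis'
    // "ref-frame-change-threshold" option below.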
    fovisParameters_ = Parameters::filterParameters(parameters, "OdomFovis");
    if(parameters.find(Parameters::kOdomVisKeyFrameThr()) != parameters.end())
    {
        fovisParameters_.insert(*parameters.find(Parameters::kOdomVisKeyFrameThr()));
    }
}

OdometryFovis::~OdometryFovis()
{
#ifdef RTABMAP_FOVIS
    delete fovis_;
    delete rect_;
    delete stereoCalib_;
    delete depthImage_;
    delete stereoDepth_;
#endif
}

void OdometryFovis::reset(const Transform & initialPose)
{
    Odometry::reset(initialPose);
#ifdef RTABMAP_FOVIS
    if(fovis_)
    {
        delete fovis_;
        fovis_ = 0;
    }
    if(rect_)
    {
        delete rect_;
        rect_ = 0;
    }
    if(stereoCalib_)
    {
        delete stereoCalib_;
        stereoCalib_ = 0;
    }
    if(depthImage_)
    {
        delete depthImage_;
        depthImage_ = 0;
    }
    if(stereoDepth_)
    {
        delete stereoDepth_;
        stereoDepth_ = 0;
    }
    lost_ = false;
    previousLocalTransform_.setNull();
#endif
}

// return a non-null transform if odometry is correctly computed
Transform OdometryFovis::computeTransform(
        SensorData & data,
        const Transform & guess,
        OdometryInfo * info)
{
    UDEBUG("");
    Transform t;

#ifdef RTABMAP_FOVIS
    UTimer timer;

    if(data.imageRaw().empty() ||
       data.imageRaw().rows != data.depthOrRightRaw().rows ||
       data.imageRaw().cols != data.depthOrRightRaw().cols)
    {
        UERROR("Not supported input!");
        return t;
    }

    if(!((data.cameraModels().size() == 1 &&
          data.cameraModels()[0].isValidForReprojection()) ||
         (data.stereoCameraModel().isValidForProjection() &&
          data.stereoCameraModel().left().isValidForReprojection() &&
          data.stereoCameraModel().right().isValidForReprojection())))
    {
        UERROR("Invalid camera model! Mono cameras=%d (reproj=%d), Stereo camera=%d (reproj=%d|%d)",
               (int)data.cameraModels().size(),
               data.cameraModels().size() && data.cameraModels()[0].isValidForReprojection()?1:0,
               data.stereoCameraModel().isValidForProjection()?1:0,
               data.stereoCameraModel().left().isValidForReprojection()?1:0,
               data.stereoCameraModel().right().isValidForReprojection()?1:0);
        return t;
    }

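    // FOVIS consumes 8-bit grayscale images; convert BGR input if necessary.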
    cv::Mat gray;
    if(data.imageRaw().type() == CV_8UC3)
    {
        cv::cvtColor(data.imageRaw(), gray, CV_BGR2GRAY);
    }
    else if(data.imageRaw().type() == CV_8UC1)
    {
        gray = data.imageRaw();
    }
    else
    {
        UFATAL("Not supported color type!");
    }

    fovis::VisualOdometryOptions options;
    if(fovis_ == 0 || (data.cameraModels().size() != 1 && stereoDepth_ == 0))
    {
        options = fovis::VisualOdometry::getDefaultOptions();

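        // Map RTAB-Map's "OdomFovis*" parameters onto the corresponding libfovis
        // option strings; when a parameter was not overridden, the RTAB-Map
        // default value is used.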
        ParametersMap defaults = Parameters::getDefaultParameters("OdomFovis");
        options["feature-window-size"] = uValue(fovisParameters_, Parameters::kOdomFovisFeatureWindowSize(), defaults.at(Parameters::kOdomFovisFeatureWindowSize()));
        options["max-pyramid-level"] = uValue(fovisParameters_, Parameters::kOdomFovisMaxPyramidLevel(), defaults.at(Parameters::kOdomFovisMaxPyramidLevel()));
        options["min-pyramid-level"] = uValue(fovisParameters_, Parameters::kOdomFovisMinPyramidLevel(), defaults.at(Parameters::kOdomFovisMinPyramidLevel()));
        options["target-pixels-per-feature"] = uValue(fovisParameters_, Parameters::kOdomFovisTargetPixelsPerFeature(), defaults.at(Parameters::kOdomFovisTargetPixelsPerFeature()));
        options["fast-threshold"] = uValue(fovisParameters_, Parameters::kOdomFovisFastThreshold(), defaults.at(Parameters::kOdomFovisFastThreshold()));
        options["use-adaptive-threshold"] = uValue(fovisParameters_, Parameters::kOdomFovisUseAdaptiveThreshold(), defaults.at(Parameters::kOdomFovisUseAdaptiveThreshold()));
        options["fast-threshold-adaptive-gain"] = uValue(fovisParameters_, Parameters::kOdomFovisFastThresholdAdaptiveGain(), defaults.at(Parameters::kOdomFovisFastThresholdAdaptiveGain()));
        options["use-homography-initialization"] = uValue(fovisParameters_, Parameters::kOdomFovisUseHomographyInitialization(), defaults.at(Parameters::kOdomFovisUseHomographyInitialization()));
        options["ref-frame-change-threshold"] = uValue(fovisParameters_, Parameters::kOdomVisKeyFrameThr(), uNumber2Str(Parameters::defaultOdomVisKeyFrameThr()));

        // OdometryFrame
        options["use-bucketing"] = uValue(fovisParameters_, Parameters::kOdomFovisUseBucketing(), defaults.at(Parameters::kOdomFovisUseBucketing()));
        options["bucket-width"] = uValue(fovisParameters_, Parameters::kOdomFovisBucketWidth(), defaults.at(Parameters::kOdomFovisBucketWidth()));
        options["bucket-height"] = uValue(fovisParameters_, Parameters::kOdomFovisBucketHeight(), defaults.at(Parameters::kOdomFovisBucketHeight()));
        options["max-keypoints-per-bucket"] = uValue(fovisParameters_, Parameters::kOdomFovisMaxKeypointsPerBucket(), defaults.at(Parameters::kOdomFovisMaxKeypointsPerBucket()));
        options["use-image-normalization"] = uValue(fovisParameters_, Parameters::kOdomFovisUseImageNormalization(), defaults.at(Parameters::kOdomFovisUseImageNormalization()));

        // MotionEstimator
        options["inlier-max-reprojection-error"] = uValue(fovisParameters_, Parameters::kOdomFovisInlierMaxReprojectionError(), defaults.at(Parameters::kOdomFovisInlierMaxReprojectionError()));
        options["clique-inlier-threshold"] = uValue(fovisParameters_, Parameters::kOdomFovisCliqueInlierThreshold(), defaults.at(Parameters::kOdomFovisCliqueInlierThreshold()));
        options["min-features-for-estimate"] = uValue(fovisParameters_, Parameters::kOdomFovisMinFeaturesForEstimate(), defaults.at(Parameters::kOdomFovisMinFeaturesForEstimate()));
        options["max-mean-reprojection-error"] = uValue(fovisParameters_, Parameters::kOdomFovisMaxMeanReprojectionError(), defaults.at(Parameters::kOdomFovisMaxMeanReprojectionError()));
        options["use-subpixel-refinement"] = uValue(fovisParameters_, Parameters::kOdomFovisUseSubpixelRefinement(), defaults.at(Parameters::kOdomFovisUseSubpixelRefinement()));
        options["feature-search-window"] = uValue(fovisParameters_, Parameters::kOdomFovisFeatureSearchWindow(), defaults.at(Parameters::kOdomFovisFeatureSearchWindow()));
        options["update-target-features-with-refined"] = uValue(fovisParameters_, Parameters::kOdomFovisUpdateTargetFeaturesWithRefined(), defaults.at(Parameters::kOdomFovisUpdateTargetFeaturesWithRefined()));

        // StereoDepth
        options["stereo-require-mutual-match"] = uValue(fovisParameters_, Parameters::kOdomFovisStereoRequireMutualMatch(), defaults.at(Parameters::kOdomFovisStereoRequireMutualMatch()));
        options["stereo-max-dist-epipolar-line"] = uValue(fovisParameters_, Parameters::kOdomFovisStereoMaxDistEpipolarLine(), defaults.at(Parameters::kOdomFovisStereoMaxDistEpipolarLine()));
        options["stereo-max-refinement-displacement"] = uValue(fovisParameters_, Parameters::kOdomFovisStereoMaxRefinementDisplacement(), defaults.at(Parameters::kOdomFovisStereoMaxRefinementDisplacement()));
        options["stereo-max-disparity"] = uValue(fovisParameters_, Parameters::kOdomFovisStereoMaxDisparity(), defaults.at(Parameters::kOdomFovisStereoMaxDisparity()));
    }

    fovis::DepthSource * depthSource = 0;
    cv::Mat depth;
    cv::Mat right;
    Transform localTransform = Transform::getIdentity();
    if(data.cameraModels().size() == 1) //depth
    {
        UDEBUG("");
        fovis::CameraIntrinsicsParameters rgb_params;
        memset(&rgb_params, 0, sizeof(fovis::CameraIntrinsicsParameters));
        rgb_params.width = data.cameraModels()[0].imageWidth();
        rgb_params.height = data.cameraModels()[0].imageHeight();

        rgb_params.fx = data.cameraModels()[0].fx();
        rgb_params.fy = data.cameraModels()[0].fy();
        rgb_params.cx = data.cameraModels()[0].cx()==0.0?double(rgb_params.width) / 2.0:data.cameraModels()[0].cx();
        rgb_params.cy = data.cameraModels()[0].cy()==0.0?double(rgb_params.height) / 2.0:data.cameraModels()[0].cy();
        localTransform = data.cameraModels()[0].localTransform();

        if(rect_ == 0)
        {
            UINFO("Init rgbd fovis: %dx%d fx=%f fy=%f cx=%f cy=%f", rgb_params.width, rgb_params.height, rgb_params.fx, rgb_params.fy, rgb_params.cx, rgb_params.cy);
            rect_ = new fovis::Rectification(rgb_params);
        }

        if(depthImage_ == 0)
        {
            depthImage_ = new fovis::DepthImage(rgb_params, rgb_params.width, rgb_params.height);
        }
        // make sure all zeros are NAN
        if(data.depthRaw().type() == CV_32FC1)
        {
            depth = data.depthRaw();
            for(int i=0; i<depth.rows; ++i)
            {
                for(int j=0; j<depth.cols; ++j)
                {
                    float & d = depth.at<float>(i,j);
                    if(d == 0.0f)
                    {
                        d = NAN;
                    }
                }
            }
        }
        else if(data.depthRaw().type() == CV_16UC1)
        {
            depth = cv::Mat(data.depthRaw().size(), CV_32FC1);
            for(int i=0; i<data.depthRaw().rows; ++i)
            {
                for(int j=0; j<data.depthRaw().cols; ++j)
                {
                    float d = float(data.depthRaw().at<unsigned short>(i,j))/1000.0f;
                    depth.at<float>(i, j) = d==0.0f?NAN:d;
                }
            }
        }
        else
        {
            UFATAL("Unknown depth format!");
        }
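        // Hand the metric depth buffer (float meters, NaN = invalid) to the
        // fovis depth source; the buffer matches the RGB image resolution,
        // as enforced by the input check above.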
        depthImage_->setDepthImage((float*)depth.data);
        depthSource = depthImage_;
    }
    else // stereo
    {
        UDEBUG("");
        // initialize left camera parameters
        fovis::CameraIntrinsicsParameters left_parameters;
        left_parameters.width = data.stereoCameraModel().left().imageWidth();
        left_parameters.height = data.stereoCameraModel().left().imageHeight();
        left_parameters.fx = data.stereoCameraModel().left().fx();
        left_parameters.fy = data.stereoCameraModel().left().fy();
        left_parameters.cx = data.stereoCameraModel().left().cx()==0.0?double(left_parameters.width) / 2.0:data.stereoCameraModel().left().cx();
        left_parameters.cy = data.stereoCameraModel().left().cy()==0.0?double(left_parameters.height) / 2.0:data.stereoCameraModel().left().cy();
        localTransform = data.stereoCameraModel().localTransform();

        if(rect_ == 0)
        {
            UINFO("Init stereo fovis: %dx%d fx=%f fy=%f cx=%f cy=%f", left_parameters.width, left_parameters.height, left_parameters.fx, left_parameters.fy, left_parameters.cx, left_parameters.cy);
            rect_ = new fovis::Rectification(left_parameters);
        }

        if(stereoCalib_ == 0)
        {
            // initialize right camera parameters
            fovis::CameraIntrinsicsParameters right_parameters;
            right_parameters.width = data.stereoCameraModel().right().imageWidth();
            right_parameters.height = data.stereoCameraModel().right().imageHeight();
            right_parameters.fx = data.stereoCameraModel().right().fx();
            right_parameters.fy = data.stereoCameraModel().right().fy();
            right_parameters.cx = data.stereoCameraModel().right().cx()==0.0?double(right_parameters.width) / 2.0:data.stereoCameraModel().right().cx();
            right_parameters.cy = data.stereoCameraModel().right().cy()==0.0?double(right_parameters.height) / 2.0:data.stereoCameraModel().right().cy();

            // as we use rectified images, rotation is identity
            // and translation is baseline only
            fovis::StereoCalibrationParameters stereo_parameters;
            stereo_parameters.left_parameters = left_parameters;
            stereo_parameters.right_parameters = right_parameters;
            stereo_parameters.right_to_left_rotation[0] = 1.0;
            stereo_parameters.right_to_left_rotation[1] = 0.0;
            stereo_parameters.right_to_left_rotation[2] = 0.0;
            stereo_parameters.right_to_left_rotation[3] = 0.0;
            stereo_parameters.right_to_left_translation[0] = -data.stereoCameraModel().baseline();
            stereo_parameters.right_to_left_translation[1] = 0.0;
            stereo_parameters.right_to_left_translation[2] = 0.0;

            stereoCalib_ = new fovis::StereoCalibration(stereo_parameters);
        }

        if(stereoDepth_ == 0)
        {
            stereoDepth_ = new fovis::StereoDepth(stereoCalib_, options);
        }
        if(data.rightRaw().type() == CV_8UC3)
        {
            cv::cvtColor(data.rightRaw(), right, CV_BGR2GRAY);
        }
        else if(data.rightRaw().type() == CV_8UC1)
        {
            right = data.rightRaw();
        }
        else
        {
            UFATAL("Not supported color type!");
        }
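        // The rectified right image is handed to fovis::StereoDepth, which
        // matches features across the stereo pair (see the stereo-* options
        // above) to provide depth for the left-image keypoints.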
        stereoDepth_->setRightImage(right.data);
        depthSource = stereoDepth_;
    }

    if(fovis_ == 0)
    {
        fovis_ = new fovis::VisualOdometry(rect_, options);
    }

    UDEBUG("");
    fovis_->processFrame(gray.data, depthSource);

    // get the motion estimate for this frame to the previous frame.
    t = Transform::fromEigen3d(fovis_->getMotionEstimate());
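    // Note: this estimate is expressed in the camera (optical) frame; it is
    // re-expressed in the robot base frame further below using the camera's
    // localTransform.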

    cv::Mat covariance;
    fovis::MotionEstimateStatusCode statusCode = fovis_->getMotionEstimator()->getMotionEstimateStatus();
    if(statusCode > fovis::SUCCESS)
    {
        UWARN("Fovis error status: %s", fovis::MotionEstimateStatusCodeStrings[statusCode]);
        t.setNull();
        lost_ = true;
        covariance = cv::Mat::eye(6,6, CV_64FC1)*9999.0;
        previousLocalTransform_.setNull();
    }
    else if(lost_)
    {
        lost_ = false;
        // we are not lost anymore, but we don't know where we are now relative to the last valid pose
        covariance = cv::Mat::eye(6,6, CV_64FC1)*9999.0;
        previousLocalTransform_.setNull();
    }
    else
    {
        const Eigen::MatrixXd& cov = fovis_->getMotionEstimator()->getMotionEstimateCov();
        if(cov.cols() == 6 && cov.rows() == 6 && cov(0,0) > 0.0)
        {
            covariance = cv::Mat::eye(6,6, CV_64FC1);
            memcpy(covariance.data, cov.data(), 36*sizeof(double));
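            // (a raw copy is valid here even though Eigen stores column-major
            // and cv::Mat row-major, because a covariance matrix is symmetric)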
            covariance *= 100.0; // to be on the same scale as loop closure detection
        }
    }

    if(!t.isNull() && !t.isIdentity() && !localTransform.isIdentity() && !localTransform.isNull())
    {
        // from camera frame to base frame
        if(!previousLocalTransform_.isNull())
        {
            t = previousLocalTransform_ * t * localTransform.inverse();
        }
        else
        {
            t = localTransform * t * localTransform.inverse();
        }
        previousLocalTransform_ = localTransform;
    }

    if(info)
    {
        info->type = (int)kTypeFovis;
        info->keyFrameAdded = fovis_->getChangeReferenceFrames();
        info->features = fovis_->getTargetFrame()->getNumDetectedKeypoints();
        info->reg.matches = fovis_->getMotionEstimator()->getNumMatches();
        info->reg.inliers = fovis_->getMotionEstimator()->getNumInliers();
        info->reg.covariance = covariance;

        if(this->isInfoDataFilled())
        {
            const fovis::FeatureMatch * matches = fovis_->getMotionEstimator()->getMatches();
            int numMatches = fovis_->getMotionEstimator()->getNumMatches();
            if(matches && numMatches>0)
            {
                info->refCorners.resize(numMatches);
                info->newCorners.resize(numMatches);
                info->cornerInliers.resize(numMatches);
                int oi=0;
                for (int i = 0; i < numMatches; ++i)
                {
                    info->refCorners[i].x = matches[i].ref_keypoint->base_uv[0];
                    info->refCorners[i].y = matches[i].ref_keypoint->base_uv[1];
                    info->newCorners[i].x = matches[i].target_keypoint->base_uv[0];
                    info->newCorners[i].y = matches[i].target_keypoint->base_uv[1];
                    info->cornerInliers[oi++] = i;
                }
                info->cornerInliers.resize(oi);
            }
        }
    }

    UINFO("Odom update time = %fs status=%s", timer.elapsed(), fovis::MotionEstimateStatusCodeStrings[statusCode]);

#else
    UERROR("RTAB-Map is not built with FOVIS support! Select another visual odometry approach.");
#endif
    return t;
}

} // namespace rtabmap
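
// ----------------------------------------------------------------------------
// Usage sketch (illustrative only, excluded from compilation): a minimal
// example of how an Odometry implementation such as OdometryFovis is typically
// driven frame by frame. The public Odometry::process() entry point, the
// header paths and the SensorData source (a camera driver providing RGB-D or
// rectified stereo frames) are assumptions here, not part of this file.
#if 0
#include <rtabmap/core/odometry/OdometryFovis.h>
#include <rtabmap/core/OdometryInfo.h>
#include <rtabmap/core/SensorData.h>

static rtabmap::Transform exampleOdometryUpdate(
        rtabmap::OdometryFovis & odom,  // kept alive across frames
        rtabmap::SensorData & frame)    // current RGB-D or stereo frame
{
    rtabmap::OdometryInfo info;
    // process() calls computeTransform() above and integrates the increment;
    // the returned pose is null when the frame-to-frame estimate failed.
    rtabmap::Transform pose = odom.process(frame, &info);
    UINFO("matches=%d inliers=%d", info.reg.matches, info.reg.inliers);
    return pose;
}
#endif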