OdometryFovis.cpp
/*
Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.
    * Neither the name of the Universite de Sherbrooke nor the
      names of its contributors may be used to endorse or promote products
      derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include "rtabmap/core/odometry/OdometryFovis.h"
#include "rtabmap/core/OdometryInfo.h"
#include "rtabmap/core/util2d.h"
#include "rtabmap/utilite/ULogger.h"
#include "rtabmap/utilite/UTimer.h"
#include "rtabmap/utilite/UStl.h"

#ifdef RTABMAP_FOVIS
#include <libfovis/fovis.hpp>
#endif

namespace rtabmap {

OdometryFovis::OdometryFovis(const ParametersMap & parameters) :
    Odometry(parameters)
#ifdef RTABMAP_FOVIS
    ,
    fovis_(0),
    rect_(0),
    stereoCalib_(0),
    depthImage_(0),
    stereoDepth_(0),
    lost_(false)
#endif
{
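    // keep only the FOVIS-specific parameters; the generic visual key-frame
    // threshold is also kept because it maps to fovis' "ref-frame-change-threshold"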
    fovisParameters_ = Parameters::filterParameters(parameters, "OdomFovis");
    if(parameters.find(Parameters::kOdomVisKeyFrameThr()) != parameters.end())
    {
        fovisParameters_.insert(*parameters.find(Parameters::kOdomVisKeyFrameThr()));
    }
}

OdometryFovis::~OdometryFovis()
{
#ifdef RTABMAP_FOVIS
    delete fovis_;
    delete rect_;
    delete stereoCalib_;
    delete depthImage_;
    delete stereoDepth_;
#endif
}

void OdometryFovis::reset(const Transform & initialPose)
{
    Odometry::reset(initialPose);
#ifdef RTABMAP_FOVIS
    if(fovis_)
    {
        delete fovis_;
        fovis_ = 0;
    }
    if(rect_)
    {
        delete rect_;
        rect_ = 0;
    }
    if(stereoCalib_)
    {
        delete stereoCalib_;
        stereoCalib_ = 0;
    }
    if(depthImage_)
    {
        delete depthImage_;
        depthImage_ = 0;
    }
    if(stereoDepth_)
    {
        delete stereoDepth_;
        stereoDepth_ = 0;
    }
    lost_ = false;
    previousLocalTransform_.setNull();
#endif
}

// return a non-null transform if odometry is correctly computed
Transform OdometryFovis::computeTransform(
        SensorData & data,
        const Transform & guess,
        OdometryInfo * info)
{
    UDEBUG("");
    Transform t;

#ifdef RTABMAP_FOVIS
    UTimer timer;

    if(data.imageRaw().empty() ||
        data.imageRaw().rows != data.depthOrRightRaw().rows ||
        data.imageRaw().cols != data.depthOrRightRaw().cols)
    {
        UERROR("Not supported input!");
        return t;
    }

    if(!((data.cameraModels().size() == 1 &&
            data.cameraModels()[0].isValidForReprojection()) ||
        (data.stereoCameraModel().isValidForProjection() &&
            data.stereoCameraModel().left().isValidForReprojection() &&
            data.stereoCameraModel().right().isValidForReprojection())))
    {
        UERROR("Invalid camera model! Mono cameras=%d (reproj=%d), Stereo camera=%d (reproj=%d|%d)",
                (int)data.cameraModels().size(),
                data.cameraModels().size() && data.cameraModels()[0].isValidForReprojection()?1:0,
                data.stereoCameraModel().isValidForProjection()?1:0,
                data.stereoCameraModel().left().isValidForReprojection()?1:0,
                data.stereoCameraModel().right().isValidForReprojection()?1:0);
        return t;
    }

    cv::Mat gray;
    if(data.imageRaw().type() == CV_8UC3)
    {
        cv::cvtColor(data.imageRaw(), gray, CV_BGR2GRAY);
    }
    else if(data.imageRaw().type() == CV_8UC1)
    {
        gray = data.imageRaw();
    }
    else
    {
        UFATAL("Not supported color type!");
    }

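    // translate RTAB-Map "OdomFovis/*" parameters into libfovis' string-keyed
    // VisualOdometryOptions (only needed before the fovis objects are created)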
    fovis::VisualOdometryOptions options;
    if(fovis_ == 0 || (data.cameraModels().size() != 1 && stereoDepth_ == 0))
    {
        options = fovis::VisualOdometry::getDefaultOptions();

        ParametersMap defaults = Parameters::getDefaultParameters("OdomFovis");
        options["feature-window-size"] = uValue(fovisParameters_, Parameters::kOdomFovisFeatureWindowSize(), defaults.at(Parameters::kOdomFovisFeatureWindowSize()));
        options["max-pyramid-level"] = uValue(fovisParameters_, Parameters::kOdomFovisMaxPyramidLevel(), defaults.at(Parameters::kOdomFovisMaxPyramidLevel()));
        options["min-pyramid-level"] = uValue(fovisParameters_, Parameters::kOdomFovisMinPyramidLevel(), defaults.at(Parameters::kOdomFovisMinPyramidLevel()));
        options["target-pixels-per-feature"] = uValue(fovisParameters_, Parameters::kOdomFovisTargetPixelsPerFeature(), defaults.at(Parameters::kOdomFovisTargetPixelsPerFeature()));
        options["fast-threshold"] = uValue(fovisParameters_, Parameters::kOdomFovisFastThreshold(), defaults.at(Parameters::kOdomFovisFastThreshold()));
        options["use-adaptive-threshold"] = uValue(fovisParameters_, Parameters::kOdomFovisUseAdaptiveThreshold(), defaults.at(Parameters::kOdomFovisUseAdaptiveThreshold()));
        options["fast-threshold-adaptive-gain"] = uValue(fovisParameters_, Parameters::kOdomFovisFastThresholdAdaptiveGain(), defaults.at(Parameters::kOdomFovisFastThresholdAdaptiveGain()));
        options["use-homography-initialization"] = uValue(fovisParameters_, Parameters::kOdomFovisUseHomographyInitialization(), defaults.at(Parameters::kOdomFovisUseHomographyInitialization()));
        options["ref-frame-change-threshold"] = uValue(fovisParameters_, Parameters::kOdomVisKeyFrameThr(), uNumber2Str(Parameters::defaultOdomVisKeyFrameThr()));

        // OdometryFrame
        options["use-bucketing"] = uValue(fovisParameters_, Parameters::kOdomFovisUseBucketing(), defaults.at(Parameters::kOdomFovisUseBucketing()));
        options["bucket-width"] = uValue(fovisParameters_, Parameters::kOdomFovisBucketWidth(), defaults.at(Parameters::kOdomFovisBucketWidth()));
        options["bucket-height"] = uValue(fovisParameters_, Parameters::kOdomFovisBucketHeight(), defaults.at(Parameters::kOdomFovisBucketHeight()));
        options["max-keypoints-per-bucket"] = uValue(fovisParameters_, Parameters::kOdomFovisMaxKeypointsPerBucket(), defaults.at(Parameters::kOdomFovisMaxKeypointsPerBucket()));
        options["use-image-normalization"] = uValue(fovisParameters_, Parameters::kOdomFovisUseImageNormalization(), defaults.at(Parameters::kOdomFovisUseImageNormalization()));

        // MotionEstimator
        options["inlier-max-reprojection-error"] = uValue(fovisParameters_, Parameters::kOdomFovisInlierMaxReprojectionError(), defaults.at(Parameters::kOdomFovisInlierMaxReprojectionError()));
        options["clique-inlier-threshold"] = uValue(fovisParameters_, Parameters::kOdomFovisCliqueInlierThreshold(), defaults.at(Parameters::kOdomFovisCliqueInlierThreshold()));
        options["min-features-for-estimate"] = uValue(fovisParameters_, Parameters::kOdomFovisMinFeaturesForEstimate(), defaults.at(Parameters::kOdomFovisMinFeaturesForEstimate()));
        options["max-mean-reprojection-error"] = uValue(fovisParameters_, Parameters::kOdomFovisMaxMeanReprojectionError(), defaults.at(Parameters::kOdomFovisMaxMeanReprojectionError()));
        options["use-subpixel-refinement"] = uValue(fovisParameters_, Parameters::kOdomFovisUseSubpixelRefinement(), defaults.at(Parameters::kOdomFovisUseSubpixelRefinement()));
        options["feature-search-window"] = uValue(fovisParameters_, Parameters::kOdomFovisFeatureSearchWindow(), defaults.at(Parameters::kOdomFovisFeatureSearchWindow()));
        options["update-target-features-with-refined"] = uValue(fovisParameters_, Parameters::kOdomFovisUpdateTargetFeaturesWithRefined(), defaults.at(Parameters::kOdomFovisUpdateTargetFeaturesWithRefined()));

        // StereoDepth
        options["stereo-require-mutual-match"] = uValue(fovisParameters_, Parameters::kOdomFovisStereoRequireMutualMatch(), defaults.at(Parameters::kOdomFovisStereoRequireMutualMatch()));
        options["stereo-max-dist-epipolar-line"] = uValue(fovisParameters_, Parameters::kOdomFovisStereoMaxDistEpipolarLine(), defaults.at(Parameters::kOdomFovisStereoMaxDistEpipolarLine()));
        options["stereo-max-refinement-displacement"] = uValue(fovisParameters_, Parameters::kOdomFovisStereoMaxRefinementDisplacement(), defaults.at(Parameters::kOdomFovisStereoMaxRefinementDisplacement()));
        options["stereo-max-disparity"] = uValue(fovisParameters_, Parameters::kOdomFovisStereoMaxDisparity(), defaults.at(Parameters::kOdomFovisStereoMaxDisparity()));
    }

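    // pick the depth source: a single calibrated camera model means RGB-D input
    // (fovis::DepthImage), otherwise the stereo pair is used (fovis::StereoDepth)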
    fovis::DepthSource * depthSource = 0;
    cv::Mat depth;
    cv::Mat right;
    Transform localTransform = Transform::getIdentity();
    if(data.cameraModels().size() == 1) //depth
    {
        UDEBUG("");
        fovis::CameraIntrinsicsParameters rgb_params;
        memset(&rgb_params, 0, sizeof(fovis::CameraIntrinsicsParameters));
        rgb_params.width = data.cameraModels()[0].imageWidth();
        rgb_params.height = data.cameraModels()[0].imageHeight();

        rgb_params.fx = data.cameraModels()[0].fx();
        rgb_params.fy = data.cameraModels()[0].fy();
        rgb_params.cx = data.cameraModels()[0].cx()==0.0?double(rgb_params.width) / 2.0:data.cameraModels()[0].cx();
        rgb_params.cy = data.cameraModels()[0].cy()==0.0?double(rgb_params.height) / 2.0:data.cameraModels()[0].cy();
        localTransform = data.cameraModels()[0].localTransform();

        if(rect_ == 0)
        {
            UINFO("Init rgbd fovis: %dx%d fx=%f fy=%f cx=%f cy=%f", rgb_params.width, rgb_params.height, rgb_params.fx, rgb_params.fy, rgb_params.cx, rgb_params.cy);
            rect_ = new fovis::Rectification(rgb_params);
        }

        if(depthImage_ == 0)
        {
            depthImage_ = new fovis::DepthImage(rgb_params, rgb_params.width, rgb_params.height);
        }
        // make sure all zeros are NAN
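        // (fovis expects invalid depth measurements as NaN, while RTAB-Map
        // encodes them as 0 in CV_32FC1 meters or CV_16UC1 millimeters)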
        if(data.depthRaw().type() == CV_32FC1)
        {
            depth = data.depthRaw();
            for(int i=0; i<depth.rows; ++i)
            {
                for(int j=0; j<depth.cols; ++j)
                {
                    float & d = depth.at<float>(i,j);
                    if(d == 0.0f)
                    {
                        d = NAN;
                    }
                }
            }
        }
        else if(data.depthRaw().type() == CV_16UC1)
        {
            depth = cv::Mat(data.depthRaw().size(), CV_32FC1);
            for(int i=0; i<data.depthRaw().rows; ++i)
            {
                for(int j=0; j<data.depthRaw().cols; ++j)
                {
                    float d = float(data.depthRaw().at<unsigned short>(i,j))/1000.0f;
                    depth.at<float>(i, j) = d==0.0f?NAN:d;
                }
            }
        }
        else
        {
            UFATAL("Unknown depth format!");
        }
        depthImage_->setDepthImage((float*)depth.data);
        depthSource = depthImage_;
    }
    else // stereo
    {
        UDEBUG("");
        // initialize left camera parameters
        fovis::CameraIntrinsicsParameters left_parameters;
        left_parameters.width = data.stereoCameraModel().left().imageWidth();
        left_parameters.height = data.stereoCameraModel().left().imageHeight();
        left_parameters.fx = data.stereoCameraModel().left().fx();
        left_parameters.fy = data.stereoCameraModel().left().fy();
        left_parameters.cx = data.stereoCameraModel().left().cx()==0.0?double(left_parameters.width) / 2.0:data.stereoCameraModel().left().cx();
        left_parameters.cy = data.stereoCameraModel().left().cy()==0.0?double(left_parameters.height) / 2.0:data.stereoCameraModel().left().cy();
        localTransform = data.stereoCameraModel().localTransform();

        if(rect_ == 0)
        {
            UINFO("Init stereo fovis: %dx%d fx=%f fy=%f cx=%f cy=%f", left_parameters.width, left_parameters.height, left_parameters.fx, left_parameters.fy, left_parameters.cx, left_parameters.cy);
            rect_ = new fovis::Rectification(left_parameters);
        }

        if(stereoCalib_ == 0)
        {
            // initialize right camera parameters
            fovis::CameraIntrinsicsParameters right_parameters;
            right_parameters.width = data.stereoCameraModel().right().imageWidth();
            right_parameters.height = data.stereoCameraModel().right().imageHeight();
            right_parameters.fx = data.stereoCameraModel().right().fx();
            right_parameters.fy = data.stereoCameraModel().right().fy();
            right_parameters.cx = data.stereoCameraModel().right().cx()==0.0?double(right_parameters.width) / 2.0:data.stereoCameraModel().right().cx();
            right_parameters.cy = data.stereoCameraModel().right().cy()==0.0?double(right_parameters.height) / 2.0:data.stereoCameraModel().right().cy();

            // as we use rectified images, rotation is identity
            // and translation is baseline only
            fovis::StereoCalibrationParameters stereo_parameters;
            stereo_parameters.left_parameters = left_parameters;
            stereo_parameters.right_parameters = right_parameters;
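            // identity rotation expressed as a quaternion (w=1, x=y=z=0)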
            stereo_parameters.right_to_left_rotation[0] = 1.0;
            stereo_parameters.right_to_left_rotation[1] = 0.0;
            stereo_parameters.right_to_left_rotation[2] = 0.0;
            stereo_parameters.right_to_left_rotation[3] = 0.0;
            stereo_parameters.right_to_left_translation[0] = -data.stereoCameraModel().baseline();
            stereo_parameters.right_to_left_translation[1] = 0.0;
            stereo_parameters.right_to_left_translation[2] = 0.0;

            stereoCalib_ = new fovis::StereoCalibration(stereo_parameters);
        }

        if(stereoDepth_ == 0)
        {
            stereoDepth_ = new fovis::StereoDepth(stereoCalib_, options);
        }
        if(data.rightRaw().type() == CV_8UC3)
        {
            cv::cvtColor(data.rightRaw(), right, CV_BGR2GRAY);
        }
        else if(data.rightRaw().type() == CV_8UC1)
        {
            right = data.rightRaw();
        }
        else
        {
            UFATAL("Not supported color type!");
        }
        stereoDepth_->setRightImage(right.data);
        depthSource = stereoDepth_;
    }

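    // the VisualOdometry object is created lazily on the first frame, once the
    // rectification model built above is available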
    if(fovis_ == 0)
    {
        fovis_ = new fovis::VisualOdometry(rect_, options);
    }

    UDEBUG("");
    fovis_->processFrame(gray.data, depthSource);

    // get the motion estimate between this frame and the previous frame
    t = Transform::fromEigen3d(fovis_->getMotionEstimate());

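    // convert fovis' status into RTAB-Map conventions: a failed estimate gives a
    // null transform and a large covariance; a recovered estimate also gets a
    // large covariance since the jump from the last valid pose is unknown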
    cv::Mat covariance;
    fovis::MotionEstimateStatusCode statusCode = fovis_->getMotionEstimator()->getMotionEstimateStatus();
    if(statusCode > fovis::SUCCESS)
    {
        UWARN("Fovis error status: %s", fovis::MotionEstimateStatusCodeStrings[statusCode]);
        t.setNull();
        lost_ = true;
        covariance = cv::Mat::eye(6,6, CV_64FC1)*9999.0;
        previousLocalTransform_.setNull();
    }
    else if(lost_)
    {
        lost_ = false;
        // we are not lost anymore, but we don't know where we are relative to the last valid pose
        covariance = cv::Mat::eye(6,6, CV_64FC1)*9999.0;
        previousLocalTransform_.setNull();
    }
    else
    {
        const Eigen::MatrixXd& cov = fovis_->getMotionEstimator()->getMotionEstimateCov();
        if(cov.cols() == 6 && cov.rows() == 6 && cov(0,0) > 0.0)
        {
            covariance = cv::Mat::eye(6,6, CV_64FC1);
            memcpy(covariance.data, cov.data(), 36*sizeof(double));
            covariance *= 100.0; // to be in the same scale as loop closure detection
        }
    }

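    // fovis estimates motion in the camera (optical) frame; apply the camera's
    // local transform to express the increment in the robot/base frame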
    if(!t.isNull() && !t.isIdentity() && !localTransform.isIdentity() && !localTransform.isNull())
    {
        // from camera frame to base frame
        if(!previousLocalTransform_.isNull())
        {
            t = previousLocalTransform_ * t * localTransform.inverse();
        }
        else
        {
            t = localTransform * t * localTransform.inverse();
        }
        previousLocalTransform_ = localTransform;
    }

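    // export statistics (key frame flag, features, matches, inliers, covariance)
    // and optionally the matched corners for visualization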
    if(info)
    {
        info->type = (int)kTypeFovis;
        info->keyFrameAdded = fovis_->getChangeReferenceFrames();
        info->features = fovis_->getTargetFrame()->getNumDetectedKeypoints();
        info->reg.matches = fovis_->getMotionEstimator()->getNumMatches();
        info->reg.inliers = fovis_->getMotionEstimator()->getNumInliers();
        info->reg.covariance = covariance;

        if(this->isInfoDataFilled())
        {
            const fovis::FeatureMatch * matches = fovis_->getMotionEstimator()->getMatches();
            int numMatches = fovis_->getMotionEstimator()->getNumMatches();
            if(matches && numMatches>0)
            {
                info->refCorners.resize(numMatches);
                info->newCorners.resize(numMatches);
                info->cornerInliers.resize(numMatches);
                int oi=0;
                for (int i = 0; i < numMatches; ++i)
                {
                    info->refCorners[i].x = matches[i].ref_keypoint->base_uv[0];
                    info->refCorners[i].y = matches[i].ref_keypoint->base_uv[1];
                    info->newCorners[i].x = matches[i].target_keypoint->base_uv[0];
                    info->newCorners[i].y = matches[i].target_keypoint->base_uv[1];
                    info->cornerInliers[oi++] = i;
                }
                info->cornerInliers.resize(oi);
            }
        }
    }

    UINFO("Odom update time = %fs status=%s", timer.elapsed(), fovis::MotionEstimateStatusCodeStrings[statusCode]);

#else
    UERROR("RTAB-Map is not built with FOVIS support! Select another visual odometry approach.");
#endif
    return t;
}

} // namespace rtabmap