OdometryORBSLAM2.cpp
/*
Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.
    * Neither the name of the Universite de Sherbrooke nor the
      names of its contributors may be used to endorse or promote products
      derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include "rtabmap/core/OdometryORBSLAM2.h"
#include "rtabmap/core/OdometryInfo.h"
#include "rtabmap/core/util2d.h"
#include "rtabmap/core/util3d_transforms.h"
#include "rtabmap/utilite/ULogger.h"
#include "rtabmap/utilite/UTimer.h"
#include "rtabmap/utilite/UStl.h"
#include "rtabmap/utilite/UDirectory.h"
#include <pcl/common/transforms.h>

#ifdef RTABMAP_ORB_SLAM2
#include <System.h>
#include <thread>

using namespace std;

namespace ORB_SLAM2 {
// Override the original Tracking class to strip out all the rendering calls
// (FrameDrawer/MapDrawer updates) and the monocular initialization.
class Tracker: public Tracking
{
public:
	Tracker(System* pSys, ORBVocabulary* pVoc, FrameDrawer* pFrameDrawer, MapDrawer* pMapDrawer, Map* pMap,
			KeyFrameDatabase* pKFDB, const std::string &strSettingPath, const int sensor, long unsigned int maxFeatureMapSize) :
		Tracking(pSys, pVoc, pFrameDrawer, pMapDrawer, pMap, pKFDB, strSettingPath, sensor),
		maxFeatureMapSize_(maxFeatureMapSize)
	{
	}

private:
	long unsigned int maxFeatureMapSize_;

protected:
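	// Track() below follows ORB_SLAM2's Tracking::Track() step by step, with
	// the drawer updates commented out, the monocular branch removed, and an
	// optional cap on the feature map size (maxFeatureMapSize_ above).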
	void Track()
	{
		if(mState==NO_IMAGES_YET)
		{
			mState = NOT_INITIALIZED;
		}

		mLastProcessedState=mState;

		// Get Map Mutex -> Map cannot be changed
		unique_lock<mutex> lock(mpMap->mMutexMapUpdate);

		if(mState==NOT_INITIALIZED)
		{
			// if(mSensor==System::STEREO || mSensor==System::RGBD)
			StereoInitialization();
			//else
			//	MonocularInitialization();

			//mpFrameDrawer->Update(this);

			if(mState!=OK)
				return;
		}
		else
		{
			// System is initialized. Track Frame.
			bool bOK;

			// Initial camera pose estimation using motion model or relocalization (if tracking is lost)
			if(!mbOnlyTracking)
			{
				// Local Mapping is activated. This is the normal behaviour, unless
				// you explicitly activate the "only tracking" mode.

				if(mState==OK || mState==LOST)
				{
					// Local Mapping might have changed some MapPoints tracked in last frame
					CheckReplacedInLastFrame();

					if(mVelocity.empty() || mCurrentFrame.mnId<mnLastRelocFrameId+2)
					{
						bOK = TrackReferenceKeyFrame();
					}
					else
					{
						bOK = TrackWithMotionModel();
						if(!bOK)
							bOK = TrackReferenceKeyFrame();
					}
					if(bOK)
					{
						mState=OK;
					}
				}
				else
				{
					bOK = Relocalization();
				}
			}
			else
			{
				// Localization Mode: Local Mapping is deactivated

				if(mState==LOST)
				{
					bOK = Relocalization();
				}
				else
				{
					if(!mbVO)
					{
						// In last frame we tracked enough MapPoints in the map

						if(!mVelocity.empty())
						{
							bOK = TrackWithMotionModel();
						}
						else
						{
							bOK = TrackReferenceKeyFrame();
						}
					}
					else
					{
						// In last frame we tracked mainly "visual odometry" points.

						// We compute two camera poses, one from motion model and one doing relocalization.
						// If relocalization is successful we choose that solution, otherwise we retain
						// the "visual odometry" solution.

						bool bOKMM = false;
						bool bOKReloc = false;
						std::vector<MapPoint*> vpMPsMM;
						std::vector<bool> vbOutMM;
						cv::Mat TcwMM;
						if(!mVelocity.empty())
						{
							bOKMM = TrackWithMotionModel();
							vpMPsMM = mCurrentFrame.mvpMapPoints;
							vbOutMM = mCurrentFrame.mvbOutlier;
							TcwMM = mCurrentFrame.mTcw.clone();
						}
						bOKReloc = Relocalization();

						if(bOKMM && !bOKReloc)
						{
							mCurrentFrame.SetPose(TcwMM);
							mCurrentFrame.mvpMapPoints = vpMPsMM;
							mCurrentFrame.mvbOutlier = vbOutMM;

							if(mbVO)
							{
								for(int i =0; i<mCurrentFrame.N; i++)
								{
									if(mCurrentFrame.mvpMapPoints[i] && !mCurrentFrame.mvbOutlier[i])
									{
										mCurrentFrame.mvpMapPoints[i]->IncreaseFound();
									}
								}
							}
						}
						else if(bOKReloc)
						{
							mbVO = false;
						}

						bOK = bOKReloc || bOKMM;
					}
				}
			}

			mCurrentFrame.mpReferenceKF = mpReferenceKF;

			// If we have an initial estimation of the camera pose and matching, track the local map.
			if(!mbOnlyTracking)
			{
				if(bOK)
					bOK = TrackLocalMap();
			}
			else
			{
				// mbVO true means that there are few matches to MapPoints in the map. We cannot retrieve
				// a local map and therefore we do not perform TrackLocalMap(). Once the system relocalizes
				// the camera we will use the local map again.
				if(bOK && !mbVO)
					bOK = TrackLocalMap();
			}

			if(bOK)
				mState = OK;
			else
				mState=LOST;

			// Update drawer
			//mpFrameDrawer->Update(this);

			// If tracking was good, check if we insert a keyframe
			if(bOK)
			{
				// Update motion model
				if(!mLastFrame.mTcw.empty())
				{
					cv::Mat LastTwc = cv::Mat::eye(4,4,CV_32F);
					mLastFrame.GetRotationInverse().copyTo(LastTwc.rowRange(0,3).colRange(0,3));
					mLastFrame.GetCameraCenter().copyTo(LastTwc.rowRange(0,3).col(3));
					mVelocity = mCurrentFrame.mTcw*LastTwc;
				}
				else
					mVelocity = cv::Mat();

				//mpMapDrawer->SetCurrentCameraPose(mCurrentFrame.mTcw);

				// Clean VO matches
				for(int i=0; i<mCurrentFrame.N; i++)
				{
					MapPoint* pMP = mCurrentFrame.mvpMapPoints[i];
					if(pMP)
						if(pMP->Observations()<1)
						{
							mCurrentFrame.mvbOutlier[i] = false;
							mCurrentFrame.mvpMapPoints[i]=static_cast<MapPoint*>(NULL);
						}
				}

				// Delete temporal MapPoints
				for(list<MapPoint*>::iterator lit = mlpTemporalPoints.begin(), lend = mlpTemporalPoints.end(); lit!=lend; lit++)
				{
					MapPoint* pMP = *lit;
					delete pMP;
				}
				mlpTemporalPoints.clear();

				// Check if we need to insert a new keyframe
				if(NeedNewKeyFrame())
				{
					CreateNewKeyFrame();
				}

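				// Optionally bound the feature map: when the map holds more than
				// maxFeatureMapSize_ points, evict the oldest map points (and any
				// keyframes left without observations), keeping the most recent ones.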
				if(maxFeatureMapSize_ > 0)
				{
					// limit the size of the feature map, keeping only the most recent points
					if(mpMap->KeyFramesInMap()>1 && mpMap->MapPointsInMap()>maxFeatureMapSize_)
					{
						std::vector<KeyFrame*> kfs = mpMap->GetAllKeyFrames();
						std::map<long unsigned int, KeyFrame*> kfsSorted;
						for(unsigned int i=1; i<kfs.size(); ++i)
						{
							kfsSorted.insert(std::make_pair(kfs[i]->mnId, kfs[i]));
						}
						KeyFrame * lastFrame = kfsSorted.rbegin()->second;
						std::vector<MapPoint*> mapPoints = mpMap->GetAllMapPoints();
						std::map<long unsigned int, MapPoint*> mapPointsSorted;
						for(unsigned int i=0; i<mapPoints.size(); ++i)
						{
							mapPointsSorted.insert(std::make_pair(mapPoints[i]->mnId, mapPoints[i]));
						}

						for(std::map<long unsigned int, MapPoint*>::iterator iter=mapPointsSorted.begin();
							iter != mapPointsSorted.end() && mpMap->MapPointsInMap()>maxFeatureMapSize_;
							++iter)
						{
							if(!iter->second->IsInKeyFrame(lastFrame))
							{
								// FIXME: Memory leak: ORB_SLAM2 doesn't delete after removing from the map...
								// Not sure when it is safe to delete it, as if I delete just
								// after setting the bad flag, the app crashes.
								iter->second->SetBadFlag();
							}
						}
						// remove keyframes without observations
						for(std::map<long unsigned int, KeyFrame*>::iterator iter=kfsSorted.begin();
							iter != kfsSorted.end();
							++iter)
						{
							if(iter->second!=lastFrame && iter->second->GetMapPoints().size()==0)
							{
								// FIXME: Memory leak: ORB_SLAM2 doesn't delete after removing from the map...
								// Not sure when it is safe to delete it, as if I delete just
								// after setting the bad flag, the app crashes.
								iter->second->SetErase();
							}
							else
							{
								break;
							}
						}
					}
				}

				// We allow points with high innovation (considered outliers by the Huber function)
				// to pass to the new keyframe, so that bundle adjustment will finally decide
				// if they are outliers or not. We don't want the next frame to estimate its position
				// with those points, so we discard them in the frame.
				for(int i=0; i<mCurrentFrame.N;i++)
				{
					if(mCurrentFrame.mvpMapPoints[i] && mCurrentFrame.mvbOutlier[i])
						mCurrentFrame.mvpMapPoints[i]=static_cast<MapPoint*>(NULL);
				}
			}

			// Reset if the camera gets lost soon after initialization
			if(mState==LOST)
			{
				//if(mpMap->KeyFramesInMap()<=5)
				{
					UWARN("Track lost...");
					return;
				}
			}

			if(!mCurrentFrame.mpReferenceKF)
				mCurrentFrame.mpReferenceKF = mpReferenceKF;

			mLastFrame = Frame(mCurrentFrame);
		}

		// Store frame pose information to retrieve the complete camera trajectory afterwards.
		if(!mCurrentFrame.mTcw.empty())
		{
			cv::Mat Tcr = mCurrentFrame.mTcw*mCurrentFrame.mpReferenceKF->GetPoseInverse();
			mlRelativeFramePoses.push_back(Tcr);
			mlpReferences.push_back(mpReferenceKF);
			mlFrameTimes.push_back(mCurrentFrame.mTimeStamp);
			mlbLost.push_back(mState==LOST);
		}
		else
		{
			// This can happen if tracking is lost
			mlRelativeFramePoses.push_back(mlRelativeFramePoses.back());
			mlpReferences.push_back(mlpReferences.back());
			mlFrameTimes.push_back(mlFrameTimes.back());
			mlbLost.push_back(mState==LOST);
		}
	}

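	// Same as ORB_SLAM2's Tracking::StereoInitialization(), minus the map
	// drawer update; it serves both the stereo and RGB-D cases (monocular
	// input is not supported by this odometry plugin).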
	void StereoInitialization()
	{
		if(mCurrentFrame.N>500)
		{
			// Set Frame pose to the origin
			mCurrentFrame.SetPose(cv::Mat::eye(4,4,CV_32F));

			// Create KeyFrame
			KeyFrame* pKFini = new KeyFrame(mCurrentFrame,mpMap,mpKeyFrameDB);

			// Insert KeyFrame in the map
			mpMap->AddKeyFrame(pKFini);

			// Create MapPoints and associate them to the KeyFrame
			for(int i=0; i<mCurrentFrame.N;i++)
			{
				float z = mCurrentFrame.mvDepth[i];
				if(z>0)
				{
					cv::Mat x3D = mCurrentFrame.UnprojectStereo(i);
					MapPoint* pNewMP = new MapPoint(x3D,pKFini,mpMap);
					pNewMP->AddObservation(pKFini,i);
					pKFini->AddMapPoint(pNewMP,i);
					pNewMP->ComputeDistinctiveDescriptors();
					pNewMP->UpdateNormalAndDepth();
					mpMap->AddMapPoint(pNewMP);

					mCurrentFrame.mvpMapPoints[i]=pNewMP;
				}
			}

			cout << "New map created with " << mpMap->MapPointsInMap() << " points" << endl;

			mpLocalMapper->InsertKeyFrame(pKFini);

			mLastFrame = Frame(mCurrentFrame);
			mnLastKeyFrameId=mCurrentFrame.mnId;
			mpLastKeyFrame = pKFini;

			mvpLocalKeyFrames.push_back(pKFini);
			mvpLocalMapPoints=mpMap->GetAllMapPoints();
			mpReferenceKF = pKFini;
			mCurrentFrame.mpReferenceKF = pKFini;

			mpMap->SetReferenceMapPoints(mvpLocalMapPoints);

			mpMap->mvpKeyFrameOrigins.push_back(pKFini);

			//mpMapDrawer->SetCurrentCameraPose(mCurrentFrame.mTcw);

			mState=OK;
		}
	}

public:
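	// These mirror Tracking::GrabImageStereo()/GrabImageRGBD(); since Track()
	// is not virtual in ORB_SLAM2, they are re-defined here so that the
	// Track() implemented above is the one actually called.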
	cv::Mat GrabImageStereo(const cv::Mat &imRectLeft, const cv::Mat &imRectRight, const double &timestamp)
	{
		mImGray = imRectLeft;
		cv::Mat imGrayRight = imRectRight;

		if(mImGray.channels()==3)
		{
			if(mbRGB)
			{
				cvtColor(mImGray,mImGray,CV_RGB2GRAY);
			}
			else
			{
				cvtColor(mImGray,mImGray,CV_BGR2GRAY);
			}
		}
		else if(mImGray.channels()==4)
		{
			if(mbRGB)
			{
				cvtColor(mImGray,mImGray,CV_RGBA2GRAY);
			}
			else
			{
				cvtColor(mImGray,mImGray,CV_BGRA2GRAY);
			}
		}
		if(imGrayRight.channels()==3)
		{
			if(mbRGB)
			{
				cvtColor(imGrayRight,imGrayRight,CV_RGB2GRAY);
			}
			else
			{
				cvtColor(imGrayRight,imGrayRight,CV_BGR2GRAY);
			}
		}
		else if(imGrayRight.channels()==4)
		{
			if(mbRGB)
			{
				cvtColor(imGrayRight,imGrayRight,CV_RGBA2GRAY);
			}
			else
			{
				cvtColor(imGrayRight,imGrayRight,CV_BGRA2GRAY);
			}
		}

		mCurrentFrame = Frame(mImGray,imGrayRight,timestamp,mpORBextractorLeft,mpORBextractorRight,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth);

		Track();

		return mCurrentFrame.mTcw.clone();
	}

	cv::Mat GrabImageRGBD(const cv::Mat &imRGB, const cv::Mat &imD, const double &timestamp)
	{
		mImGray = imRGB;
		cv::Mat imDepth = imD;

		if(mImGray.channels()==3)
		{
			if(mbRGB)
				cvtColor(mImGray,mImGray,CV_RGB2GRAY);
			else
				cvtColor(mImGray,mImGray,CV_BGR2GRAY);
		}
		else if(mImGray.channels()==4)
		{
			if(mbRGB)
				cvtColor(mImGray,mImGray,CV_RGBA2GRAY);
			else
				cvtColor(mImGray,mImGray,CV_BGRA2GRAY);
		}

		UASSERT(imDepth.type()==CV_32F);

		mCurrentFrame = Frame(mImGray,imDepth,timestamp,mpORBextractorLeft,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth);

		Track();

		return mCurrentFrame.mTcw.clone();
	}
};

// Hack to disable loop closing
class LoopCloser: public LoopClosing
{
public:
	LoopCloser(Map* pMap, KeyFrameDatabase* pDB, ORBVocabulary* pVoc, const bool bFixScale) :
		LoopClosing(pMap, pDB, pVoc, bFixScale)
	{
	}

public:
	void RunNoLoop()
	{
		mbFinished = false;

		while(1)
		{
			// just clear the buffer
			{
				unique_lock<mutex> lock(mMutexLoopQueue);
				mlpLoopKeyFrameQueue.clear();
			}

			ResetIfRequested();

			if(CheckFinish())
				break;

			usleep(1000000); // 1 sec
		}

		SetFinish();
	}
};

} // namespace ORB_SLAM2

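// Wrapper holding every ORB_SLAM2 object used by OdometryORBSLAM2: the
// constructor loads the vocabulary, then init() writes a YAML configuration
// from the RTAB-Map parameters and camera model, and wires up the tracker,
// the local mapper and the (no-op) loop closer threads.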
class ORBSLAM2System
{
public:
	ORBSLAM2System(const rtabmap::ParametersMap & parameters) :
		mpVocabulary(0),
		mpKeyFrameDatabase(0),
		mpMap(0),
		mpTracker(0),
		mpLocalMapper(0),
		mpLoopCloser(0),
		mptLocalMapping(0),
		mptLoopClosing(0),
		parameters_(parameters)
	{
		std::string vocabularyPath;
		rtabmap::Parameters::parse(parameters, rtabmap::Parameters::kOdomORBSLAM2VocPath(), vocabularyPath);

		if(!vocabularyPath.empty())
		{
			// Load ORB Vocabulary
			vocabularyPath = uReplaceChar(vocabularyPath, '~', UDirectory::homeDir());
			UWARN("Loading ORB Vocabulary: \"%s\". This could take a while...", vocabularyPath.c_str());
			mpVocabulary = new ORB_SLAM2::ORBVocabulary();
			bool bVocLoad = mpVocabulary->loadFromTextFile(vocabularyPath);
			if(!bVocLoad)
			{
				UERROR("Failed to open vocabulary at %s", vocabularyPath.c_str());
				delete mpVocabulary;
				mpVocabulary = 0;
			}
			else
			{
				UWARN("Vocabulary loaded!");
			}
		}
		else
		{
			UERROR("ORBSLAM2 vocabulary path should be set! (Parameter name=\"%s\")", rtabmap::Parameters::kOdomORBSLAM2VocPath().c_str());
		}
	}

	bool init(const rtabmap::CameraModel & model, bool stereo, double baseline)
	{
		if(!mpVocabulary)
		{
			UERROR("Vocabulary not loaded!");
			return false;
		}

		this->shutdown();

		// Create configuration file
		std::string workingDir;
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kRtabmapWorkingDirectory(), workingDir);
		if(workingDir.empty())
		{
			workingDir = ".";
		}
		std::string configPath = workingDir+"/rtabmap_orbslam2.yaml";
		std::ofstream ofs (configPath, std::ofstream::out);
		ofs << "%YAML:1.0" << std::endl;
		ofs << std::endl;

		//# Camera calibration and distortion parameters (OpenCV)
		ofs << "Camera.fx: " << model.fx() << std::endl;
		ofs << "Camera.fy: " << model.fy() << std::endl;
		ofs << "Camera.cx: " << model.cx() << std::endl;
		ofs << "Camera.cy: " << model.cy() << std::endl;
		ofs << std::endl;

		if(model.D().cols < 4)
		{
			ofs << "Camera.k1: " << 0 << std::endl;
			ofs << "Camera.k2: " << 0 << std::endl;
			ofs << "Camera.p1: " << 0 << std::endl;
			ofs << "Camera.p2: " << 0 << std::endl;
			if(!stereo)
			{
				ofs << "Camera.k3: " << 0 << std::endl;
			}
		}
		if(model.D().cols >= 4)
		{
			ofs << "Camera.k1: " << model.D().at<double>(0,0) << std::endl;
			ofs << "Camera.k2: " << model.D().at<double>(0,1) << std::endl;
			ofs << "Camera.p1: " << model.D().at<double>(0,2) << std::endl;
			ofs << "Camera.p2: " << model.D().at<double>(0,3) << std::endl;
		}
		if(model.D().cols >= 5)
		{
			ofs << "Camera.k3: " << model.D().at<double>(0,4) << std::endl;
		}
		if(model.D().cols > 5)
		{
			UWARN("Unhandled camera distortion size %d, only the first 5 coefficients are used", model.D().cols);
		}
		ofs << std::endl;

		ofs << "Camera.width: " << model.imageWidth() << std::endl;
		ofs << "Camera.height: " << model.imageHeight() << std::endl;
		ofs << std::endl;

		//# IR projector baseline times fx (approx.)
		if(baseline <= 0.0)
		{
			baseline = rtabmap::Parameters::defaultOdomORBSLAM2Bf();
			rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kOdomORBSLAM2Bf(), baseline);
		}
		ofs << "Camera.bf: " << model.fx()*baseline << std::endl;
		ofs << std::endl;

		//# Color order of the images (0: BGR, 1: RGB. It is ignored if images are grayscale)
		//Camera.RGB: 1
		ofs << "Camera.RGB: 1" << std::endl;
		ofs << std::endl;

		float fps = rtabmap::Parameters::defaultOdomORBSLAM2Fps();
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kOdomORBSLAM2Fps(), fps);
		ofs << "Camera.fps: " << fps << std::endl;
		ofs << std::endl;

		//# Close/Far threshold. Baseline times.
		double thDepth = rtabmap::Parameters::defaultOdomORBSLAM2ThDepth();
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kOdomORBSLAM2ThDepth(), thDepth);
		ofs << "ThDepth: " << thDepth << std::endl;
		ofs << std::endl;

		//# Depth map values factor
		ofs << "DepthMapFactor: " << 1000.0 << std::endl;
		ofs << std::endl;

		//#--------------------------------------------------------------------------------------------
		//# ORB Parameters
		//#--------------------------------------------------------------------------------------------
		//# ORB Extractor: Number of features per image
		int features = rtabmap::Parameters::defaultOdomORBSLAM2MaxFeatures();
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kOdomORBSLAM2MaxFeatures(), features);
		ofs << "ORBextractor.nFeatures: " << features << std::endl;
		ofs << std::endl;

		//# ORB Extractor: Scale factor between levels in the scale pyramid
		double scaleFactor = rtabmap::Parameters::defaultORBScaleFactor();
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kORBScaleFactor(), scaleFactor);
		ofs << "ORBextractor.scaleFactor: " << scaleFactor << std::endl;
		ofs << std::endl;

		//# ORB Extractor: Number of levels in the scale pyramid
		int levels = rtabmap::Parameters::defaultORBNLevels();
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kORBNLevels(), levels);
		ofs << "ORBextractor.nLevels: " << levels << std::endl;
		ofs << std::endl;

		//# ORB Extractor: Fast threshold
		//# Image is divided in a grid. At each cell FAST are extracted imposing a minimum response.
		//# Firstly we impose iniThFAST. If no corners are detected we impose a lower value minThFAST
		//# You can lower these values if your images have low contrast
		int iniThFAST = rtabmap::Parameters::defaultFASTThreshold();
		int minThFAST = rtabmap::Parameters::defaultFASTMinThreshold();
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kFASTThreshold(), iniThFAST);
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kFASTMinThreshold(), minThFAST);
		ofs << "ORBextractor.iniThFAST: " << iniThFAST << std::endl;
		ofs << "ORBextractor.minThFAST: " << minThFAST << std::endl;
		ofs << std::endl;

		int maxFeatureMapSize = rtabmap::Parameters::defaultOdomORBSLAM2MapSize();
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kOdomORBSLAM2MapSize(), maxFeatureMapSize);

		ofs.close();
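
		// For illustration, the generated rtabmap_orbslam2.yaml has this shape
		// (actual values come from the camera model and the parameters above):
		//   %YAML:1.0
		//   Camera.fx: <fx>
		//   Camera.fy: <fy>
		//   Camera.cx: <cx>
		//   Camera.cy: <cy>
		//   Camera.k1: <k1> ... Camera.p2: <p2>
		//   Camera.width: <width>
		//   Camera.height: <height>
		//   Camera.bf: <fx*baseline>
		//   Camera.RGB: 1
		//   Camera.fps: <fps>
		//   ThDepth: <thDepth>
		//   DepthMapFactor: 1000.0
		//   ORBextractor.nFeatures: <features>
		//   ORBextractor.scaleFactor: <scaleFactor>
		//   ORBextractor.nLevels: <levels>
		//   ORBextractor.iniThFAST: <iniThFAST>
		//   ORBextractor.minThFAST: <minThFAST>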

		// Create KeyFrame Database
		mpKeyFrameDatabase = new ORB_SLAM2::KeyFrameDatabase(*mpVocabulary);

		// Create the Map
		mpMap = new ORB_SLAM2::Map();

		// Initialize the Tracking thread
		// (it will live in the main thread of execution, the one calling this method)
		mpTracker = new ORB_SLAM2::Tracker(0, mpVocabulary, 0, 0, mpMap, mpKeyFrameDatabase, configPath, stereo?ORB_SLAM2::System::STEREO:ORB_SLAM2::System::RGBD, maxFeatureMapSize);

		// Initialize the Local Mapping thread and launch
		mpLocalMapper = new ORB_SLAM2::LocalMapping(mpMap, false);

		// Initialize the Loop Closing thread and launch
		mpLoopCloser = new ORB_SLAM2::LoopCloser(mpMap, mpKeyFrameDatabase, mpVocabulary, true);

		mptLocalMapping = new thread(&ORB_SLAM2::LocalMapping::Run, mpLocalMapper);
		mptLoopClosing = new thread(&ORB_SLAM2::LoopCloser::RunNoLoop, mpLoopCloser);

		// Set pointers between threads
		mpTracker->SetLocalMapper(mpLocalMapper);
		mpTracker->SetLoopClosing(mpLoopCloser);
		mpTracker->SetViewer(0);

		mpLocalMapper->SetTracker(mpTracker);
		mpLocalMapper->SetLoopCloser(mpLoopCloser);

		mpLoopCloser->SetTracker(mpTracker);
		mpLoopCloser->SetLocalMapper(mpLocalMapper);

		return true;
	}

	virtual ~ORBSLAM2System()
	{
		shutdown();
		delete mpVocabulary;
	}

	void shutdown()
	{
		if(mpMap)
		{
			mpLocalMapper->RequestFinish();
			mpLoopCloser->RequestFinish();

			// Wait until all threads have effectively stopped
			while(!mpLocalMapper->isFinished() || !mpLoopCloser->isFinished() || mpLoopCloser->isRunningGBA())
			{
				usleep(5000);
			}

			// cleanup!
			mptLoopClosing->join();
			delete mptLoopClosing;
			mptLoopClosing = 0;
			mptLocalMapping->join();
			delete mptLocalMapping;
			mptLocalMapping = 0;
			delete mpLoopCloser;
			mpLoopCloser = 0;
			delete mpLocalMapper;
			mpLocalMapper = 0;
			delete mpTracker;
			mpTracker = 0;
			delete mpMap;
			mpMap = 0;
			delete mpKeyFrameDatabase;
			mpKeyFrameDatabase = 0;
		}
	}

public:
	// ORB vocabulary used for place recognition and feature matching.
	ORB_SLAM2::ORBVocabulary* mpVocabulary;

	// KeyFrame database for place recognition (relocalization and loop detection).
	ORB_SLAM2::KeyFrameDatabase* mpKeyFrameDatabase;

	// Map structure that stores the pointers to all KeyFrames and MapPoints.
	ORB_SLAM2::Map* mpMap;

	// Tracker. It receives a frame and computes the associated camera pose.
	// It also decides when to insert a new keyframe, creates some new MapPoints and
	// performs relocalization if tracking fails.
	ORB_SLAM2::Tracker* mpTracker;

	// Local Mapper. It manages the local map and performs local bundle adjustment.
	ORB_SLAM2::LocalMapping* mpLocalMapper;

	// Loop Closer. In stock ORB_SLAM2 it searches loops with every new keyframe;
	// here loop detection is disabled (see LoopCloser::RunNoLoop() above).
	ORB_SLAM2::LoopCloser* mpLoopCloser;

	// System threads: Local Mapping and Loop Closing.
	// The Tracking thread "lives" in the main execution thread that creates the System object.
	std::thread* mptLocalMapping;
	std::thread* mptLoopClosing;

	rtabmap::ParametersMap parameters_;
};
#endif

namespace rtabmap {

OdometryORBSLAM2::OdometryORBSLAM2(const ParametersMap & parameters) :
	Odometry(parameters)
#ifdef RTABMAP_ORB_SLAM2
	,
	orbslam2_(0),
	firstFrame_(true),
	previousPose_(Transform::getIdentity())
#endif
{
#ifdef RTABMAP_ORB_SLAM2
	orbslam2_ = new ORBSLAM2System(parameters);
#endif
}

OdometryORBSLAM2::~OdometryORBSLAM2()
{
#ifdef RTABMAP_ORB_SLAM2
	if(orbslam2_)
	{
		delete orbslam2_;
	}
#endif
}

void OdometryORBSLAM2::reset(const Transform & initialPose)
{
	Odometry::reset(initialPose);
#ifdef RTABMAP_ORB_SLAM2
	if(orbslam2_)
	{
		orbslam2_->shutdown();
	}
	firstFrame_ = true;
	originLocalTransform_.setNull();
	previousPose_.setIdentity();
#endif
}

// return a non-null transform if odometry is correctly computed
Transform OdometryORBSLAM2::computeTransform(
		SensorData & data,
		const Transform & guess,
		OdometryInfo * info)
{
	Transform t;

#ifdef RTABMAP_ORB_SLAM2
	UTimer timer;

	if(data.imageRaw().empty() ||
		data.imageRaw().rows != data.depthOrRightRaw().rows ||
		data.imageRaw().cols != data.depthOrRightRaw().cols)
	{
		UERROR("Not supported input! RGB (%dx%d) and depth (%dx%d) should have the same size.",
				data.imageRaw().cols, data.imageRaw().rows, data.depthOrRightRaw().cols, data.depthOrRightRaw().rows);
		return t;
	}

	if(!((data.cameraModels().size() == 1 &&
			data.cameraModels()[0].isValidForReprojection()) ||
		(data.stereoCameraModel().isValidForProjection() &&
			data.stereoCameraModel().left().isValidForReprojection() &&
			data.stereoCameraModel().right().isValidForReprojection())))
	{
		UERROR("Invalid camera model!");
		return t;
	}

	bool stereo = data.cameraModels().size() == 0;

	cv::Mat covariance;
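	// ORB_SLAM2 is created lazily on the first frame, using the incoming
	// camera model (and the stereo baseline when a stereo pair is used).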
	if(orbslam2_->mpTracker == 0)
	{
		CameraModel model = data.cameraModels().size()==1?data.cameraModels()[0]:data.stereoCameraModel().left();
		if(!orbslam2_->init(model, stereo, data.stereoCameraModel().baseline()))
		{
			return t;
		}
	}

	cv::Mat Tcw;
	Transform localTransform;
	if(stereo)
	{
		localTransform = data.stereoCameraModel().localTransform();
		Tcw = ((ORB_SLAM2::Tracker*)orbslam2_->mpTracker)->GrabImageStereo(data.imageRaw(), data.rightRaw(), data.stamp());
	}
	else
	{
		localTransform = data.cameraModels()[0].localTransform();
		cv::Mat depth;
		if(data.depthRaw().type() == CV_32FC1)
		{
			depth = data.depthRaw();
		}
		else if(data.depthRaw().type() == CV_16UC1)
		{
			depth = util2d::cvtDepthToFloat(data.depthRaw());
		}
		Tcw = ((ORB_SLAM2::Tracker*)orbslam2_->mpTracker)->GrabImageRGBD(data.imageRaw(), depth, data.stamp());
	}

	Transform previousPoseInv = previousPose_.inverse();
	if(orbslam2_->mpTracker->mState == ORB_SLAM2::Tracking::LOST)
	{
		covariance = cv::Mat::eye(6,6,CV_64FC1)*9999.0f;
	}
	else if(Tcw.cols == 4 && Tcw.rows == 4)
	{
		Transform p = Transform(cv::Mat(Tcw, cv::Range(0,3), cv::Range(0,4)));

		if(!p.isNull())
		{
			if(!localTransform.isNull())
			{
				if(originLocalTransform_.isNull())
				{
					originLocalTransform_ = localTransform;
				}
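				// Tcw returned by ORB_SLAM2 is the world-to-camera transform, so
				// p.inverse() is the camera pose in ORB_SLAM2's world frame (anchored
				// at the first camera). Chaining it with the initial and current
				// camera local transforms rebases that pose into the robot base frame: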
				// transform in base frame
				p = originLocalTransform_ * p.inverse() * localTransform.inverse();
			}
			t = previousPoseInv*p;
		}
		previousPose_ = p;

		if(firstFrame_)
		{
			// just recovered from being lost, set high covariance
			covariance = cv::Mat::eye(6,6,CV_64FC1)*9999.0f;
			firstFrame_ = false;
		}
		else
		{
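			// Heuristic covariance: use baseline/8 as the 1-sigma linear error
			// (squared to get a variance) and a small fixed variance for the
			// angular terms.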
			float baseline = data.stereoCameraModel().baseline();
			if(baseline <= 0.0f)
			{
				baseline = rtabmap::Parameters::defaultOdomORBSLAM2Bf();
				rtabmap::Parameters::parse(orbslam2_->parameters_, rtabmap::Parameters::kOdomORBSLAM2Bf(), baseline);
			}
			double linearVar = 0.0001;
			if(baseline > 0.0f)
			{
				linearVar = baseline/8.0;
				linearVar *= linearVar;
			}

			covariance = cv::Mat::eye(6,6, CV_64FC1);
			covariance.at<double>(0,0) = linearVar;
			covariance.at<double>(1,1) = linearVar;
			covariance.at<double>(2,2) = linearVar;
			covariance.at<double>(3,3) = 0.0001;
			covariance.at<double>(4,4) = 0.0001;
			covariance.at<double>(5,5) = 0.0001;
		}
	}

	int totalMapPoints = 0;
	int totalKfs = 0;
	if(orbslam2_->mpMap)
	{
		totalMapPoints = orbslam2_->mpMap->MapPointsInMap();
		totalKfs = orbslam2_->mpMap->KeyFramesInMap();
	}

	if(info)
	{
		info->lost = t.isNull();
		info->type = (int)kTypeORBSLAM2;
		info->reg.covariance = covariance;
		info->localMapSize = totalMapPoints;
		info->localKeyFrames = totalKfs;

		if(this->isInfoDataFilled() && orbslam2_->mpTracker && orbslam2_->mpMap)
		{
			const std::vector<cv::KeyPoint> & kpts = orbslam2_->mpTracker->mCurrentFrame.mvKeys;
			info->reg.matchesIDs.resize(kpts.size());
			info->reg.inliersIDs.resize(kpts.size());
			int oi = 0;
			for (unsigned int i = 0; i < kpts.size(); ++i)
			{
				int wordId;
				if(orbslam2_->mpTracker->mCurrentFrame.mvpMapPoints[i] != 0)
				{
					wordId = orbslam2_->mpTracker->mCurrentFrame.mvpMapPoints[i]->mnId;
				}
				else
				{
					wordId = -(i+1);
				}
				info->words.insert(std::make_pair(wordId, kpts[i]));
				if(orbslam2_->mpTracker->mCurrentFrame.mvpMapPoints[i] != 0)
				{
					info->reg.matchesIDs[oi] = wordId;
					info->reg.inliersIDs[oi] = wordId;
					++oi;
				}
			}
			info->reg.matchesIDs.resize(oi);
			info->reg.inliersIDs.resize(oi);
			info->reg.inliers = oi;
			info->reg.matches = oi;

			std::vector<ORB_SLAM2::MapPoint*> mapPoints = orbslam2_->mpMap->GetAllMapPoints();
			Eigen::Affine3f fixRot = (this->getPose()*previousPoseInv*originLocalTransform_).toEigen3f();
			for (unsigned int i = 0; i < mapPoints.size(); ++i)
			{
				cv::Point3f pt(mapPoints[i]->GetWorldPos());
				pcl::PointXYZ ptt = pcl::transformPoint(pcl::PointXYZ(pt.x, pt.y, pt.z), fixRot);
				info->localMap.insert(std::make_pair(mapPoints[i]->mnId, cv::Point3f(ptt.x, ptt.y, ptt.z)));
			}
		}
	}

	UINFO("Odom update time = %fs, map points=%d, keyframes=%d, lost=%s", timer.elapsed(), totalMapPoints, totalKfs, t.isNull()?"true":"false");

#else
	UERROR("RTAB-Map is not built with ORB_SLAM2 support! Select another visual odometry approach.");
#endif
	return t;
}

} // namespace rtabmap
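
// Usage sketch (assuming a build with RTABMAP_ORB_SLAM2 defined and a valid
// RGB-D or stereo rtabmap::SensorData `data`): this odometry is typically
// driven through the generic rtabmap::Odometry interface, e.g.:
//
//   rtabmap::ParametersMap params;
//   params.insert(rtabmap::ParametersPair(
//       rtabmap::Parameters::kOdomORBSLAM2VocPath(), "ORBvoc.txt"));
//   rtabmap::OdometryORBSLAM2 odom(params);
//   rtabmap::OdometryInfo info;
//   rtabmap::Transform pose = odom.process(data, &info);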