// NOTE(review): this chunk is a fragmentary extraction — original file line numbers
// (36, 37, 39, 47, ...) are fused into the content and many source lines are missing.
// Comments below document only what the visible tokens establish.
//
// Tracker: thin subclass of ORB_SLAM2's Tracking, built only when RTABMAP_ORB_SLAM2
// is defined. It forwards all construction arguments to the Tracking base and adds
// one extra knob, maxFeatureMapSize, stored in maxFeatureMapSize_ (used later in
// Track() to bound the number of map points kept in the map).
36 #include <pcl/common/transforms.h> 37 #include <opencv2/imgproc/types_c.h> 39 #ifdef RTABMAP_ORB_SLAM2 47 class Tracker:
public Tracking
// Constructor: same parameter list as ORB_SLAM2::Tracking plus maxFeatureMapSize.
// sensor selects STEREO vs RGBD mode (passed through to the base class).
50 Tracker(System* pSys, ORBVocabulary* pVoc, FrameDrawer* pFrameDrawer, MapDrawer* pMapDrawer, Map* pMap,
51 KeyFrameDatabase* pKFDB,
const std::string &strSettingPath,
const int sensor,
long unsigned int maxFeatureMapSize) :
52 Tracking(pSys, pVoc, pFrameDrawer, pMapDrawer, pMap, pKFDB, strSettingPath, sensor),
53 maxFeatureMapSize_(maxFeatureMapSize)
// Upper bound on map points kept in the map; 0 presumably means "unlimited"
// (Track() only prunes when maxFeatureMapSize_ > 0).
58 long unsigned int maxFeatureMapSize_;
// Track(): re-implementation of ORB_SLAM2's main tracking state machine
// (fragmentary view — else-branches, braces and several conditions are missing
// from this extraction; embedded numbers are the original file's line numbers).
//
// State bootstrap: first image moves NO_IMAGES_YET -> NOT_INITIALIZED.
63 if(mState==NO_IMAGES_YET)
65 mState = NOT_INITIALIZED;
68 mLastProcessedState=mState;
// The whole update runs under the map-update mutex.
71 unique_lock<mutex> lock(mpMap->mMutexMapUpdate);
73 if(mState==NOT_INITIALIZED)
// Stereo/RGBD initialization creates the first keyframe and map points.
76 StereoInitialization();
// --- Initial camera pose estimation ---
96 if(mState==OK || mState==LOST)
99 CheckReplacedInLastFrame();
// No motion model yet (or just relocalized): track against reference keyframe;
// otherwise try the constant-velocity motion model first, falling back to the
// reference keyframe if it fails.
101 if(mVelocity.empty() || mCurrentFrame.mnId<mnLastRelocFrameId+2)
103 bOK = TrackReferenceKeyFrame();
107 bOK = TrackWithMotionModel();
109 bOK = TrackReferenceKeyFrame();
// Lost: attempt global relocalization.
118 bOK = Relocalization();
127 bOK = Relocalization();
135 if(!mVelocity.empty())
137 bOK = TrackWithMotionModel();
141 bOK = TrackReferenceKeyFrame();
// Combined branch: run both motion-model tracking and relocalization, then
// keep whichever succeeded (relocalization result preferred when both do —
// see bOKMM && !bOKReloc below).
153 bool bOKReloc =
false;
154 std::vector<MapPoint*> vpMPsMM;
155 std::vector<bool> vbOutMM;
157 if(!mVelocity.empty())
159 bOKMM = TrackWithMotionModel();
// Snapshot the motion-model result (points, outlier flags, pose) so it can be
// restored if relocalization fails.
160 vpMPsMM = mCurrentFrame.mvpMapPoints;
161 vbOutMM = mCurrentFrame.mvbOutlier;
162 TcwMM = mCurrentFrame.mTcw.clone();
164 bOKReloc = Relocalization();
166 if(bOKMM && !bOKReloc)
// Relocalization failed: restore the motion-model pose and associations.
168 mCurrentFrame.SetPose(TcwMM);
169 mCurrentFrame.mvpMapPoints = vpMPsMM;
170 mCurrentFrame.mvbOutlier = vbOutMM;
174 for(
int i =0; i<mCurrentFrame.N; i++)
176 if(mCurrentFrame.mvpMapPoints[i] && !mCurrentFrame.mvbOutlier[i])
178 mCurrentFrame.mvpMapPoints[i]->IncreaseFound();
188 bOK = bOKReloc || bOKMM;
193 mCurrentFrame.mpReferenceKF = mpReferenceKF;
// Refine the pose against the local map.
199 bOK = TrackLocalMap();
207 bOK = TrackLocalMap();
// --- Update motion model (velocity = Tcw_current * Twc_last) ---
222 if(!mLastFrame.mTcw.empty())
224 cv::Mat LastTwc = cv::Mat::eye(4,4,CV_32F);
225 mLastFrame.GetRotationInverse().copyTo(LastTwc.rowRange(0,3).colRange(0,3));
226 mLastFrame.GetCameraCenter().copyTo(LastTwc.rowRange(0,3).col(3));
227 mVelocity = mCurrentFrame.mTcw*LastTwc;
230 mVelocity = cv::Mat();
// Drop map points that ended up with no observations.
235 for(
int i=0; i<mCurrentFrame.N; i++)
237 MapPoint* pMP = mCurrentFrame.mvpMapPoints[i];
239 if(pMP->Observations()<1)
241 mCurrentFrame.mvbOutlier[i] =
false;
242 mCurrentFrame.mvpMapPoints[i]=
static_cast<MapPoint*
>(
NULL);
// Delete temporal points created for visual odometry (per-frame only).
247 for(list<MapPoint*>::iterator lit = mlpTemporalPoints.begin(), lend = mlpTemporalPoints.end(); lit!=lend; lit++)
249 MapPoint* pMP = *lit;
252 mlpTemporalPoints.clear();
255 if(NeedNewKeyFrame())
// --- RTAB-Map extension: bound map size (maxFeatureMapSize_) ---
// When the map exceeds maxFeatureMapSize_ points, remove the oldest map points
// (sorted by mnId) that are not observed by the newest keyframe, then erase
// keyframes left with no map points.
260 if(maxFeatureMapSize_ > 0)
263 if(mpMap->KeyFramesInMap()>1 && mpMap->MapPointsInMap()>maxFeatureMapSize_)
265 std::vector<KeyFrame*> kfs = mpMap->GetAllKeyFrames();
266 std::map<long unsigned int, KeyFrame*> kfsSorted;
// NOTE(review): loop starts at i=1, skipping kfs[0] — presumably the origin
// keyframe is deliberately excluded; confirm against upstream source.
267 for(
unsigned int i=1; i<kfs.size(); ++i)
269 kfsSorted.insert(std::make_pair(kfs[i]->mnId, kfs[i]));
// Highest mnId = most recent keyframe.
271 KeyFrame * lastFrame = kfsSorted.rbegin()->second;
272 std::vector<MapPoint*> mapPoints = mpMap->GetAllMapPoints();
273 std::map<long unsigned int, MapPoint*> mapPointsSorted;
274 for(
unsigned int i=0; i<mapPoints.size(); ++i)
276 mapPointsSorted.insert(std::make_pair(mapPoints[i]->mnId, mapPoints[i]));
// Oldest-first pruning until the map is back under the limit.
279 for(std::map<long unsigned int, MapPoint*>::iterator iter=mapPointsSorted.begin();
280 iter != mapPointsSorted.end() && mpMap->MapPointsInMap()>maxFeatureMapSize_;
283 if(!iter->second->IsInKeyFrame(lastFrame))
288 iter->second->SetBadFlag();
// Erase keyframes that lost all their map points (never the newest one).
292 for(std::map<long unsigned int, KeyFrame*>::iterator iter=kfsSorted.begin();
293 iter != kfsSorted.end();
296 if(iter->second!=lastFrame && iter->second->GetMapPoints().size()==0)
301 iter->second->SetErase();
// Clear outlier associations so they are not kept into the next frame.
315 for(
int i=0; i<mCurrentFrame.N;i++)
317 if(mCurrentFrame.mvpMapPoints[i] && mCurrentFrame.mvbOutlier[i])
318 mCurrentFrame.mvpMapPoints[i]=
static_cast<MapPoint*
>(
NULL);
327 UWARN(
"Track lost...");
// Bookkeeping for the next iteration / trajectory recording.
333 if(!mCurrentFrame.mpReferenceKF)
334 mCurrentFrame.mpReferenceKF = mpReferenceKF;
336 mLastFrame = Frame(mCurrentFrame);
// Store the relative pose w.r.t. the reference keyframe for trajectory retrieval.
340 if(!mCurrentFrame.mTcw.empty())
342 cv::Mat Tcr = mCurrentFrame.mTcw*mCurrentFrame.mpReferenceKF->GetPoseInverse();
343 mlRelativeFramePoses.push_back(Tcr);
344 mlpReferences.push_back(mpReferenceKF);
345 mlFrameTimes.push_back(mCurrentFrame.mTimeStamp);
346 mlbLost.push_back(mState==LOST);
// Tracking failed this frame: repeat the last known relative pose.
351 mlRelativeFramePoses.push_back(mlRelativeFramePoses.back());
352 mlpReferences.push_back(mlpReferences.back());
353 mlFrameTimes.push_back(mlFrameTimes.back());
354 mlbLost.push_back(mState==LOST);
// StereoInitialization(): bootstrap the map from the first stereo/RGBD frame.
// Requires at least 500 keypoints; sets the frame pose to identity, creates the
// initial keyframe, and back-projects keypoints with valid depth into map points.
// (Fragmentary view: the depth-validity guard between lines 374 and 377 of the
// original — presumably if(z>0) — is missing from this extraction.)
358 void StereoInitialization()
360 if(mCurrentFrame.N>500)
// Set Frame pose to the origin.
363 mCurrentFrame.SetPose(cv::Mat::eye(4,4,CV_32F));
// Create the initial KeyFrame and insert it in the map.
366 KeyFrame* pKFini =
new KeyFrame(mCurrentFrame,mpMap,mpKeyFrameDB);
369 mpMap->AddKeyFrame(pKFini);
// Create a MapPoint for each keypoint (depth read from mvDepth[i]).
372 for(
int i=0; i<mCurrentFrame.N;i++)
374 float z = mCurrentFrame.mvDepth[i];
377 cv::Mat x3D = mCurrentFrame.UnprojectStereo(i);
378 MapPoint* pNewMP =
new MapPoint(x3D,pKFini,mpMap);
379 pNewMP->AddObservation(pKFini,i);
380 pKFini->AddMapPoint(pNewMP,i);
381 pNewMP->ComputeDistinctiveDescriptors();
382 pNewMP->UpdateNormalAndDepth();
383 mpMap->AddMapPoint(pNewMP);
385 mCurrentFrame.mvpMapPoints[i]=pNewMP;
389 cout <<
"New map created with " << mpMap->MapPointsInMap() <<
" points" << endl;
// Hand the initial keyframe to the local mapper and seed the local map /
// reference bookkeeping used by Track().
391 mpLocalMapper->InsertKeyFrame(pKFini);
393 mLastFrame = Frame(mCurrentFrame);
394 mnLastKeyFrameId=mCurrentFrame.mnId;
395 mpLastKeyFrame = pKFini;
397 mvpLocalKeyFrames.push_back(pKFini);
398 mvpLocalMapPoints=mpMap->GetAllMapPoints();
399 mpReferenceKF = pKFini;
400 mCurrentFrame.mpReferenceKF = pKFini;
402 mpMap->SetReferenceMapPoints(mvpLocalMapPoints);
404 mpMap->mvpKeyFrameOrigins.push_back(pKFini);
// GrabImageStereo(): convert both rectified images to grayscale, build the
// stereo Frame, run Track(), and return the resulting camera pose Tcw (clone).
// (Fragmentary view: the mbRGB channel-order checks between the RGB2GRAY and
// BGR2GRAY calls are missing from this extraction.)
413 cv::Mat GrabImageStereo(
const cv::Mat &imRectLeft,
const cv::Mat &imRectRight,
const double &timestamp)
415 mImGray = imRectLeft;
416 cv::Mat imGrayRight = imRectRight;
// Left image: 3-channel -> grayscale (RGB or BGR order), 4-channel likewise.
418 if(mImGray.channels()==3)
422 cvtColor(mImGray,mImGray,CV_RGB2GRAY);
426 cvtColor(mImGray,mImGray,CV_BGR2GRAY);
429 else if(mImGray.channels()==4)
433 cvtColor(mImGray,mImGray,CV_RGBA2GRAY);
437 cvtColor(mImGray,mImGray,CV_BGRA2GRAY);
// Right image: same conversion.
440 if(imGrayRight.channels()==3)
444 cvtColor(imGrayRight,imGrayRight,CV_RGB2GRAY);
448 cvtColor(imGrayRight,imGrayRight,CV_BGR2GRAY);
451 else if(imGrayRight.channels()==4)
455 cvtColor(imGrayRight,imGrayRight,CV_RGBA2GRAY);
459 cvtColor(imGrayRight,imGrayRight,CV_BGRA2GRAY);
// Build the stereo frame with both ORB extractors and camera calibration.
463 mCurrentFrame = Frame(mImGray,imGrayRight,timestamp,mpORBextractorLeft,mpORBextractorRight,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth);
// Returned pose is a deep copy so the caller owns independent data.
467 return mCurrentFrame.mTcw.clone();
// GrabImageRGBD(): grayscale-convert the RGB image, require a float depth map
// (CV_32F, asserted), build the RGB-D Frame, run Track(), and return Tcw (clone).
// (Fragmentary view: the mbRGB checks and the Track() call are missing lines.)
470 cv::Mat GrabImageRGBD(
const cv::Mat &imRGB,
const cv::Mat &imD,
const double &timestamp)
473 cv::Mat imDepth = imD;
475 if(mImGray.channels()==3)
478 cvtColor(mImGray,mImGray,CV_RGB2GRAY);
480 cvtColor(mImGray,mImGray,CV_BGR2GRAY);
482 else if(mImGray.channels()==4)
485 cvtColor(mImGray,mImGray,CV_RGBA2GRAY);
487 cvtColor(mImGray,mImGray,CV_BGRA2GRAY);
// Depth must already be metric float — no 16U->32F conversion is done here
// (the caller converts; see computeTransform's depthRaw() type checks).
490 UASSERT(imDepth.type()==CV_32F);
492 mCurrentFrame = Frame(mImGray,imDepth,timestamp,mpORBextractorLeft,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth);
496 return mCurrentFrame.mTcw.clone();
// LoopCloser: subclass of ORB_SLAM2's LoopClosing used to DISABLE loop closing
// for odometry use — the fragment below (presumably RunNoLoop(), referenced when
// the thread is started later in this file) just drains the loop-keyframe queue
// under its mutex instead of detecting loops.
501 class LoopCloser:
public LoopClosing
504 LoopCloser(Map* pMap, KeyFrameDatabase* pDB, ORBVocabulary* pVoc,
const bool bFixScale) :
505 LoopClosing(pMap, pDB, pVoc, bFixScale)
// Discard queued keyframes so no loop closure is ever attempted.
518 unique_lock<mutex> lock(mMutexLoopQueue);
519 mlpLoopKeyFrameQueue.clear();
// ORBSLAM2System constructor (fragmentary view): loads the ORB vocabulary,
// writes a temporary rtabmap_orbslam2.yaml settings file from the camera model
// and rtabmap parameters, then constructs and wires the Tracker, LocalMapping
// and (no-op) LoopCloser components plus their threads.
541 mpKeyFrameDatabase(0),
548 parameters_(parameters)
// --- Vocabulary loading (path comes from Odom/ORBSLAM2VocPath parameter) ---
550 std::string vocabularyPath;
553 if(!vocabularyPath.empty())
557 UWARN(
"Loading ORB Vocabulary: \"%s\". This could take a while...", vocabularyPath.c_str());
558 mpVocabulary =
new ORB_SLAM2::ORBVocabulary();
// Text-format vocabulary; loading can take several seconds.
559 bool bVocLoad = mpVocabulary->loadFromTextFile(vocabularyPath);
562 UERROR(
"Failed to open vocabulary at %s", vocabularyPath.c_str());
568 UWARN(
"Vocabulary loaded!");
573 UERROR(
"ORBSLAM2 vocabulary path should be set! (Parameter name=\"%s\")", rtabmap::Parameters::kOdomORBSLAM2VocPath().c_str());
581 UERROR(
"Vocabulary not loaded!");
// --- Generate the ORB-SLAM2 YAML settings file from the camera model ---
588 std::string workingDir;
590 if(workingDir.empty())
594 std::string configPath = workingDir+
"/rtabmap_orbslam2.yaml";
595 std::ofstream ofs (configPath, std::ofstream::out);
596 ofs <<
"%YAML:1.0" << std::endl;
// Intrinsics.
600 ofs <<
"Camera.fx: " << model.
fx() << std::endl;
601 ofs <<
"Camera.fy: " << model.
fy() << std::endl;
602 ofs <<
"Camera.cx: " << model.
cx() << std::endl;
603 ofs <<
"Camera.cy: " << model.
cy() << std::endl;
// Distortion: zeros when fewer than 4 coefficients are available, otherwise
// k1,k2,p1,p2 (and k3 when present); >5 coefficients are not supported.
606 if(model.
D().cols < 4)
608 ofs <<
"Camera.k1: " << 0 << std::endl;
609 ofs <<
"Camera.k2: " << 0 << std::endl;
610 ofs <<
"Camera.p1: " << 0 << std::endl;
611 ofs <<
"Camera.p2: " << 0 << std::endl;
614 ofs <<
"Camera.k3: " << 0 << std::endl;
617 if(model.
D().cols >= 4)
619 ofs <<
"Camera.k1: " << model.
D().at<
double>(0,0) << std::endl;
620 ofs <<
"Camera.k2: " << model.
D().at<
double>(0,1) << std::endl;
621 ofs <<
"Camera.p1: " << model.
D().at<
double>(0,2) << std::endl;
622 ofs <<
"Camera.p2: " << model.
D().at<
double>(0,3) << std::endl;
624 if(model.
D().cols >= 5)
626 ofs <<
"Camera.k3: " << model.
D().at<
double>(0,4) << std::endl;
628 if(model.
D().cols > 5)
630 UWARN(
"Unhandled camera distortion size %d, only 5 first coefficients used", model.
D().cols);
634 ofs <<
"Camera.width: " << model.
imageWidth() << std::endl;
635 ofs <<
"Camera.height: " << model.
imageHeight() << std::endl;
// Stereo baseline: falls back to the default Odom/ORBSLAM2Bf parameter;
// Camera.bf is fx * baseline as ORB-SLAM2 expects.
641 baseline = rtabmap::Parameters::defaultOdomORBSLAM2Bf();
644 ofs <<
"Camera.bf: " << model.
fx()*baseline << std::endl;
649 ofs <<
"Camera.RGB: 1" << std::endl;
652 float fps = rtabmap::Parameters::defaultOdomORBSLAM2Fps();
654 ofs <<
"Camera.fps: " << fps << std::endl;
// Close/far point threshold (in baseline units, per ORB-SLAM2 convention).
658 double thDepth = rtabmap::Parameters::defaultOdomORBSLAM2ThDepth();
660 ofs <<
"ThDepth: " << thDepth << std::endl;
// Depth is already metric float here, so the map factor is fixed at 1000.
664 ofs <<
"DepthMapFactor: " << 1000.0 << std::endl;
// ORB extractor settings, taken from rtabmap's default parameters.
671 int features = rtabmap::Parameters::defaultOdomORBSLAM2MaxFeatures();
673 ofs <<
"ORBextractor.nFeatures: " << features << std::endl;
677 double scaleFactor = rtabmap::Parameters::defaultORBScaleFactor();
679 ofs <<
"ORBextractor.scaleFactor: " << scaleFactor << std::endl;
683 int levels = rtabmap::Parameters::defaultORBNLevels();
685 ofs <<
"ORBextractor.nLevels: " << levels << std::endl;
692 int iniThFAST = rtabmap::Parameters::defaultFASTThreshold();
693 int minThFAST = rtabmap::Parameters::defaultFASTMinThreshold();
696 ofs <<
"ORBextractor.iniThFAST: " << iniThFAST << std::endl;
697 ofs <<
"ORBextractor.minThFAST: " << minThFAST << std::endl;
700 int maxFeatureMapSize = rtabmap::Parameters::defaultOdomORBSLAM2MapSize();
// --- Construct and wire the SLAM components (no viewer, no frame drawer) ---
706 mpKeyFrameDatabase =
new ORB_SLAM2::KeyFrameDatabase(*mpVocabulary);
709 mpMap =
new ORB_SLAM2::Map();
// Tracker gets null System/FrameDrawer/MapDrawer pointers — odometry-only use.
713 mpTracker =
new ORB_SLAM2::Tracker(0, mpVocabulary, 0, 0, mpMap, mpKeyFrameDatabase, configPath, stereo?ORB_SLAM2::System::STEREO:ORB_SLAM2::System::RGBD, maxFeatureMapSize);
716 mpLocalMapper =
new ORB_SLAM2::LocalMapping(mpMap,
false);
719 mpLoopCloser =
new ORB_SLAM2::LoopCloser(mpMap, mpKeyFrameDatabase, mpVocabulary,
true);
// Loop-closing thread runs RunNoLoop, i.e. loop closure is effectively disabled.
721 mptLocalMapping =
new thread(&ORB_SLAM2::LocalMapping::Run, mpLocalMapper);
722 mptLoopClosing =
new thread(&ORB_SLAM2::LoopCloser::RunNoLoop, mpLoopCloser);
// Cross-wire the components as ORB_SLAM2::System normally would.
725 mpTracker->SetLocalMapper(mpLocalMapper);
726 mpTracker->SetLoopClosing(mpLoopCloser);
727 mpTracker->SetViewer(0);
729 mpLocalMapper->SetTracker(mpTracker);
730 mpLocalMapper->SetLoopCloser(mpLoopCloser);
732 mpLoopCloser->SetTracker(mpTracker);
733 mpLoopCloser->SetLocalMapper(mpLocalMapper);
// Destructor / shutdown (fragmentary view): request both worker threads to
// finish, spin-wait until local mapping and loop closing are done (and no
// global BA is running), join and delete the threads, then free the components.
738 virtual ~ORBSLAM2System()
748 mpLocalMapper->RequestFinish();
749 mpLoopCloser->RequestFinish();
// Busy-wait until all threads have effectively stopped.
752 while(!mpLocalMapper->isFinished() || !mpLoopCloser->isFinished() || mpLoopCloser->isRunningGBA())
758 mptLoopClosing->join();
759 delete mptLoopClosing;
761 mptLocalMapping->join();
762 delete mptLocalMapping;
766 delete mpLocalMapper;
// NOTE(review): only mpKeyFrameDatabase is visibly nulled after delete here;
// deletion of mpTracker/mpMap/mpLoopCloser is not visible in this extraction.
772 delete mpKeyFrameDatabase;
773 mpKeyFrameDatabase=0;
// Owned components of the embedded ORB-SLAM2 pipeline (raw pointers, managed
// manually in the constructor/destructor above).
779 ORB_SLAM2::ORBVocabulary* mpVocabulary;
782 ORB_SLAM2::KeyFrameDatabase* mpKeyFrameDatabase;
785 ORB_SLAM2::Map* mpMap;
// Custom Tracker subclass (map-size-bounded tracking).
790 ORB_SLAM2::Tracker* mpTracker;
793 ORB_SLAM2::LocalMapping* mpLocalMapper;
// Loop closing disabled at runtime (RunNoLoop thread).
797 ORB_SLAM2::LoopCloser* mpLoopCloser;
// Worker threads for local mapping and (no-op) loop closing.
801 std::thread* mptLocalMapping;
802 std::thread* mptLoopClosing;
// OdometryORBSLAM2 methods (fragmentary view — several #ifdef RTABMAP_ORB_SLAM2
// guards are fused onto content lines; ctor, reset() and computeTransform()
// fragments follow).
812 #ifdef RTABMAP_ORB_SLAM2
// Constructor: build the wrapped ORB-SLAM2 system from rtabmap parameters.
819 #ifdef RTABMAP_ORB_SLAM2 820 orbslam2_ =
new ORBSLAM2System(parameters);
// reset(): shut the SLAM system down and clear cached transforms/pose.
826 #ifdef RTABMAP_ORB_SLAM2 837 #ifdef RTABMAP_ORB_SLAM2 840 orbslam2_->shutdown();
843 originLocalTransform_.setNull();
844 previousPose_.setIdentity();
// computeTransform(): feed the sensor data into the tracker and convert the
// returned camera pose into an incremental odometry transform.
// Input validation: RGB and depth must match in size; camera model must be valid.
856 #ifdef RTABMAP_ORB_SLAM2 863 UERROR(
"Not supported input! RGB (%dx%d) and depth (%dx%d) should have the same size.",
874 UERROR(
"Invalid camera model!");
881 if(orbslam2_->mpTracker == 0)
// Stereo path: grab left/right images.
895 Tcw = ((ORB_SLAM2::Tracker*)orbslam2_->mpTracker)->GrabImageStereo(data.
imageRaw(), data.
rightRaw(), data.
stamp());
899 localTransform = data.
cameraModels()[0].localTransform();
// RGB-D path: depth accepted as CV_32FC1 directly or CV_16UC1 (converted —
// conversion line not visible in this extraction).
901 if(data.
depthRaw().type() == CV_32FC1)
905 else if(data.
depthRaw().type() == CV_16UC1)
909 Tcw = ((ORB_SLAM2::Tracker*)orbslam2_->mpTracker)->GrabImageRGBD(data.
imageRaw(), depth, data.
stamp());
// Lost tracking -> near-infinite covariance so the pose is effectively ignored.
913 if(orbslam2_->mpTracker->mState == ORB_SLAM2::Tracking::LOST)
915 covariance = cv::Mat::eye(6,6,CV_64FC1)*9999.0f;
917 else if(Tcw.cols == 4 && Tcw.rows == 4)
// Re-express the camera pose in the robot/base frame using the camera's
// local transform, then compute the increment w.r.t. the previous pose.
923 if(!localTransform.
isNull())
925 if(originLocalTransform_.isNull())
927 originLocalTransform_ = localTransform;
930 p = originLocalTransform_ * p.
inverse() * localTransform.
inverse();
932 t = previousPoseInv*p;
939 covariance = cv::Mat::eye(6,6,CV_64FC1)*9999.0f;
// Heuristic covariance: linear variance (baseline/8)^2, angular fixed 0.0001.
947 baseline = rtabmap::Parameters::defaultOdomORBSLAM2Bf();
950 double linearVar = 0.0001;
953 linearVar = baseline/8.0;
954 linearVar *= linearVar;
957 covariance = cv::Mat::eye(6,6, CV_64FC1);
958 covariance.at<
double>(0,0) = linearVar;
959 covariance.at<
double>(1,1) = linearVar;
960 covariance.at<
double>(2,2) = linearVar;
961 covariance.at<
double>(3,3) = 0.0001;
962 covariance.at<
double>(4,4) = 0.0001;
963 covariance.at<
double>(5,5) = 0.0001;
// Diagnostics: export map statistics, tracked words and the local map to info.
967 int totalMapPoints= 0;
971 totalMapPoints = orbslam2_->mpMap->MapPointsInMap();
972 totalKfs = orbslam2_->mpMap->KeyFramesInMap();
985 const std::vector<cv::KeyPoint> & kpts = orbslam2_->mpTracker->mCurrentFrame.mvKeys;
989 for (
unsigned int i = 0; i < kpts.size(); ++i)
// Keypoints with an associated map point use the map point id as word id.
992 if(orbslam2_->mpTracker->mCurrentFrame.mvpMapPoints[i] != 0)
994 wordId = orbslam2_->mpTracker->mCurrentFrame.mvpMapPoints[i]->mnId;
1000 info->
words.insert(std::make_pair(wordId, kpts[i]));
1001 if(orbslam2_->mpTracker->mCurrentFrame.mvpMapPoints[i] != 0)
// Local map export: transform map points into the odometry frame.
1013 std::vector<ORB_SLAM2::MapPoint*> mapPoints = orbslam2_->mpMap->GetAllMapPoints();
1014 Eigen::Affine3f fixRot = (this->
getPose()*previousPoseInv*originLocalTransform_).toEigen3f();
1015 for (
unsigned int i = 0; i < mapPoints.size(); ++i)
1017 cv::Point3f pt(mapPoints[i]->GetWorldPos());
1019 info->
localMap.insert(std::make_pair(mapPoints[i]->mnId, cv::Point3f(ptt.x, ptt.y, ptt.z)));
1024 UINFO(
"Odom update time = %fs, map points=%d, keyframes=%d, lost=%s", timer.
elapsed(), totalMapPoints, totalKfs, t.
isNull()?
"true":
"false");
// Fallback when built without ORB_SLAM2 support.
1027 UERROR(
"RTAB-Map is not built with ORB_SLAM2 support! Select another visual odometry approach.");
static std::string homeDir()
static bool parse(const ParametersMap ¶meters, const std::string &key, bool &value)
cv::Mat RTABMAP_EXP cvtDepthToFloat(const cv::Mat &depth16U)
cv::Point3f RTABMAP_EXP transformPoint(const cv::Point3f &pt, const Transform &transform)
bool isInfoDataFilled() const
virtual void reset(const Transform &initialPose=Transform::getIdentity())
virtual void reset(const Transform &initialPose=Transform::getIdentity())
def init(descriptorDim, matchThreshold, iterations, cuda, model_path)
std::map< int, cv::Point3f > localMap
std::map< std::string, std::string > ParametersMap
virtual ~OdometryORBSLAM2()
const Transform & getPose() const
std::vector< int > inliersIDs
const cv::Mat & imageRaw() const
#define UASSERT(condition)
Wrappers of STL for convenient functions.
bool isValidForProjection() const
const CameraModel & left() const
CvImagePtr cvtColor(const CvImageConstPtr &source, const std::string &encoding)
std::string UTILITE_EXP uReplaceChar(const std::string &str, char before, char after)
const cv::Mat & depthOrRightRaw() const
bool isValidForReprojection() const
const std::vector< CameraModel > & cameraModels() const
std::multimap< int, cv::KeyPoint > words
const CameraModel & right() const
ROSCONSOLE_DECL void shutdown()
ULogger class and convenient macros.
const StereoCameraModel & stereoCameraModel() const
std::vector< int > matchesIDs
virtual Transform computeTransform(SensorData &image, const Transform &guess=Transform(), OdometryInfo *info=0)
const Transform & localTransform() const