#include <pcl/common/transforms.h>
#include <opencv2/imgproc/types_c.h>

#ifdef RTABMAP_ORB_SLAM
#if RTABMAP_ORB_SLAM == 3
// ...
#endif

class Tracker: public Tracking
{
public:
#if RTABMAP_ORB_SLAM == 3
	Tracker(System* pSys, ORBVocabulary* pVoc, FrameDrawer* pFrameDrawer, MapDrawer* pMapDrawer, Atlas* pMap,
#else
	Tracker(System* pSys, ORBVocabulary* pVoc, FrameDrawer* pFrameDrawer, MapDrawer* pMapDrawer, Map* pMap,
#endif
			KeyFrameDatabase* pKFDB,
			const std::string &strSettingPath,
			const int sensor,
			long unsigned int maxFeatureMapSize) :
				Tracking(pSys, pVoc, pFrameDrawer, pMapDrawer, pMap, pKFDB, strSettingPath, sensor),
				maxFeatureMapSize_(maxFeatureMapSize)
	{}

private:
	long unsigned int maxFeatureMapSize_;
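
	// Track() below is adapted from ORB_SLAM2/ORB_SLAM3 Tracking::Track(); the main
	// addition is the maxFeatureMapSize_ pruning further down so that the local
	// feature map stays bounded when ORB_SLAM is used for odometry only.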
protected:
	void Track()
	{
#if RTABMAP_ORB_SLAM == 3
		Map* mpMap = mpAtlas->GetCurrentMap();
#endif
		if(mState==NO_IMAGES_YET)
		{
			mState = NOT_INITIALIZED;
		}

		mLastProcessedState = mState;

		// Get Map Mutex -> Map cannot be changed
		unique_lock<mutex> lock(mpMap->mMutexMapUpdate);

		if(mState==NOT_INITIALIZED)
		{
			StereoInitialization();
			if(mState!=OK)
				return;
#if RTABMAP_ORB_SLAM == 3
			mLastFrame = Frame(mCurrentFrame);
			if(mpAtlas->GetAllMaps().size() == 1)
			{
				mnFirstFrameId = mCurrentFrame.mnId;
			}
#endif
		}
		else
		{
			// System is initialized. Track Frame.
			bool bOK = false;
			// Initial camera pose estimation using the motion model or relocalization (if tracking is lost)
			if(!mbOnlyTracking)
			{
				// Local Mapping is activated. This is the normal behaviour.
				if(mState==OK || mState==LOST)
				{
					// Local Mapping might have changed some MapPoints tracked in the last frame
					CheckReplacedInLastFrame();

					if(mVelocity.empty() || mCurrentFrame.mnId<mnLastRelocFrameId+2)
					{
						bOK = TrackReferenceKeyFrame();
					}
					else
					{
						bOK = TrackWithMotionModel();
						if(!bOK)
							bOK = TrackReferenceKeyFrame();
					}
					if(!bOK)
					{
						bOK = Relocalization();
					}
				}
				else
				{
					bOK = Relocalization();
				}
			}
			else
			{
				// Localization Mode: Local Mapping is deactivated
				if(!mbVO)
				{
					// In the last frame we tracked enough MapPoints in the map
					if(!mVelocity.empty())
					{
						bOK = TrackWithMotionModel();
					}
					else
					{
						bOK = TrackReferenceKeyFrame();
					}
				}
				else
				{
					// In the last frame we tracked mainly "visual odometry" points.
					// We compute two camera poses, one from the motion model and one doing
					// relocalization. If relocalization is successful we choose that solution,
					// otherwise we keep the "visual odometry" solution.
					bool bOKMM = false;
					bool bOKReloc = false;
					std::vector<MapPoint*> vpMPsMM;
					std::vector<bool> vbOutMM;
					cv::Mat TcwMM;
					if(!mVelocity.empty())
					{
						bOKMM = TrackWithMotionModel();
						vpMPsMM = mCurrentFrame.mvpMapPoints;
						vbOutMM = mCurrentFrame.mvbOutlier;
						TcwMM = mCurrentFrame.mTcw.clone();
					}
					bOKReloc = Relocalization();

					if(bOKMM && !bOKReloc)
					{
						mCurrentFrame.SetPose(TcwMM);
						mCurrentFrame.mvpMapPoints = vpMPsMM;
						mCurrentFrame.mvbOutlier = vbOutMM;

						if(mbVO)
						{
							for(int i=0; i<mCurrentFrame.N; i++)
							{
								if(mCurrentFrame.mvpMapPoints[i] && !mCurrentFrame.mvbOutlier[i])
								{
									mCurrentFrame.mvpMapPoints[i]->IncreaseFound();
								}
							}
						}
					}
					else if(bOKReloc)
					{
						mbVO = false;
					}

					bOK = bOKReloc || bOKMM;
				}
			}
			if(!mCurrentFrame.mpReferenceKF)
				mCurrentFrame.mpReferenceKF = mpReferenceKF;

			// If we have an initial estimation of the camera pose and matching, track the local map.
			if(!mbOnlyTracking)
			{
				if(bOK)
					bOK = TrackLocalMap();
			}
			else
			{
				// mbVO true means that there are few matches to MapPoints in the map. We cannot
				// retrieve a local map, so we do not call TrackLocalMap(). Once the system
				// relocalizes the camera we will use the local map again.
				if(bOK && !mbVO)
					bOK = TrackLocalMap();
			}

			if(bOK)
				mState = OK;
			else
				mState = LOST;

			// If tracking was good, check if we insert a keyframe
			if(bOK)
			{
				// Update motion model
				if(!mLastFrame.mTcw.empty())
				{
					cv::Mat LastTwc = cv::Mat::eye(4,4,CV_32F);
					mLastFrame.GetRotationInverse().copyTo(LastTwc.rowRange(0,3).colRange(0,3));
					mLastFrame.GetCameraCenter().copyTo(LastTwc.rowRange(0,3).col(3));
					mVelocity = mCurrentFrame.mTcw*LastTwc;
				}
				else
				{
					mVelocity = cv::Mat();
				}

				// Clean VO matches
				for(int i=0; i<mCurrentFrame.N; i++)
				{
					MapPoint* pMP = mCurrentFrame.mvpMapPoints[i];
					if(pMP)
					{
						if(pMP->Observations()<1)
						{
							mCurrentFrame.mvbOutlier[i] = false;
							mCurrentFrame.mvpMapPoints[i] = static_cast<MapPoint*>(NULL);
						}
					}
				}

				// Delete temporal MapPoints
				for(list<MapPoint*>::iterator lit = mlpTemporalPoints.begin(), lend = mlpTemporalPoints.end(); lit!=lend; lit++)
				{
					MapPoint* pMP = *lit;
					delete pMP;
				}
				mlpTemporalPoints.clear();
				// Check if we need to insert a new keyframe
				if(NeedNewKeyFrame())
				{
					CreateNewKeyFrame();
				}

				if(maxFeatureMapSize_ > 0)
				{
					// Limit the size of the feature map: drop the oldest map points and empty keyframes
					if(mpMap->KeyFramesInMap()>1 && mpMap->MapPointsInMap()>maxFeatureMapSize_)
					{
						std::vector<KeyFrame*> kfs = mpMap->GetAllKeyFrames();
						std::map<long unsigned int, KeyFrame*> kfsSorted;
						for(unsigned int i=1; i<kfs.size(); ++i)
						{
							kfsSorted.insert(std::make_pair(kfs[i]->mnId, kfs[i]));
						}
						KeyFrame * lastFrame = kfsSorted.rbegin()->second;
						std::vector<MapPoint*> mapPoints = mpMap->GetAllMapPoints();
						std::map<long unsigned int, MapPoint*> mapPointsSorted;
						for(unsigned int i=0; i<mapPoints.size(); ++i)
						{
							mapPointsSorted.insert(std::make_pair(mapPoints[i]->mnId, mapPoints[i]));
						}

						for(std::map<long unsigned int, MapPoint*>::iterator iter=mapPointsSorted.begin();
							iter != mapPointsSorted.end() && mpMap->MapPointsInMap()>maxFeatureMapSize_;
							++iter)
						{
							if(!iter->second->IsInKeyFrame(lastFrame))
							{
								// remove the oldest map points not seen by the latest keyframe
								iter->second->SetBadFlag();
							}
						}

						// remove keyframes without observations, oldest first
						for(std::map<long unsigned int, KeyFrame*>::iterator iter=kfsSorted.begin();
							iter != kfsSorted.end();
							++iter)
						{
							if(iter->second!=lastFrame && iter->second->GetMapPoints().size()==0)
							{
								iter->second->SetErase();
							}
							else
							{
								break;
							}
						}
					}
				}
				// We allow points with high innovation (considered outliers by the Huber function)
				// to pass to the new keyframe, so that bundle adjustment will finally decide
				// if they are outliers or not. We don't want the next frame to estimate its
				// position with those points, so they are discarded in the current frame.
				for(int i=0; i<mCurrentFrame.N; i++)
				{
					if(mCurrentFrame.mvpMapPoints[i] && mCurrentFrame.mvbOutlier[i])
						mCurrentFrame.mvpMapPoints[i] = static_cast<MapPoint*>(NULL);
				}
			}

			if(mState==LOST)
			{
				UWARN("Track lost...");
				return;
			}
			if(!mCurrentFrame.mpReferenceKF)
				mCurrentFrame.mpReferenceKF = mpReferenceKF;

			mLastFrame = Frame(mCurrentFrame);
		}

		// Store frame pose information to retrieve the complete camera trajectory afterwards.
		if(!mCurrentFrame.mTcw.empty())
		{
			cv::Mat Tcr = mCurrentFrame.mTcw*mCurrentFrame.mpReferenceKF->GetPoseInverse();
			mlRelativeFramePoses.push_back(Tcr);
			mlpReferences.push_back(mpReferenceKF);
			mlFrameTimes.push_back(mCurrentFrame.mTimeStamp);
			mlbLost.push_back(mState==LOST);
		}
		else
		{
			// This can happen if tracking is lost
			mlRelativeFramePoses.push_back(mlRelativeFramePoses.back());
			mlpReferences.push_back(mlpReferences.back());
			mlFrameTimes.push_back(mlFrameTimes.back());
			mlbLost.push_back(mState==LOST);
		}
	}
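
	// Stereo/RGB-D map initialization (from ORB_SLAM's Tracking): creates the first
	// keyframe at the origin and seeds the map with every keypoint that has a valid depth.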
	void StereoInitialization()
	{
		if(mCurrentFrame.N>500)
		{
			// Set Frame pose to the origin
			mCurrentFrame.SetPose(cv::Mat::eye(4,4,CV_32F));

#if RTABMAP_ORB_SLAM == 3
			Map* mpMap = mpAtlas->GetCurrentMap();
#endif
			// Create the first KeyFrame and insert it in the map
			KeyFrame* pKFini = new KeyFrame(mCurrentFrame,mpMap,mpKeyFrameDB);
			mpMap->AddKeyFrame(pKFini);

			// Create MapPoints and associate them to the KeyFrame
			for(int i=0; i<mCurrentFrame.N; i++)
			{
				float z = mCurrentFrame.mvDepth[i];
				if(z>0)
				{
					cv::Mat x3D = mCurrentFrame.UnprojectStereo(i);
					MapPoint* pNewMP = new MapPoint(x3D,pKFini,mpMap);
					pNewMP->AddObservation(pKFini,i);
					pKFini->AddMapPoint(pNewMP,i);
					pNewMP->ComputeDistinctiveDescriptors();
					pNewMP->UpdateNormalAndDepth();
					mpMap->AddMapPoint(pNewMP);

					mCurrentFrame.mvpMapPoints[i]=pNewMP;
				}
			}

			cout << "New map created with " << mpMap->MapPointsInMap() << " points" << endl;

			mpLocalMapper->InsertKeyFrame(pKFini);

			mLastFrame = Frame(mCurrentFrame);
			mnLastKeyFrameId=mCurrentFrame.mnId;
			mpLastKeyFrame = pKFini;

			mvpLocalKeyFrames.push_back(pKFini);
			mvpLocalMapPoints=mpMap->GetAllMapPoints();
			mpReferenceKF = pKFini;
			mCurrentFrame.mpReferenceKF = pKFini;

			mpMap->SetReferenceMapPoints(mvpLocalMapPoints);

			mpMap->mvpKeyFrameOrigins.push_back(pKFini);

			mState = OK;
		}
	}
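
	// Stereo entry point used by the odometry wrapper: converts the rectified pair to
	// grayscale, builds the ORB frame and runs Track(); returns the current camera pose Tcw.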
public:
	cv::Mat GrabImageStereo(
			const cv::Mat &imRectLeft,
			const cv::Mat &imRectRight,
			const double &timestamp)
	{
		mImGray = imRectLeft;
		cv::Mat imGrayRight = imRectRight;

		if(mImGray.channels()==3)
		{
			if(mbRGB)
				cvtColor(mImGray,mImGray,CV_RGB2GRAY);
			else
				cvtColor(mImGray,mImGray,CV_BGR2GRAY);
		}
		else if(mImGray.channels()==4)
		{
			if(mbRGB)
				cvtColor(mImGray,mImGray,CV_RGBA2GRAY);
			else
				cvtColor(mImGray,mImGray,CV_BGRA2GRAY);
		}
		if(imGrayRight.channels()==3)
		{
			if(mbRGB)
				cvtColor(imGrayRight,imGrayRight,CV_RGB2GRAY);
			else
				cvtColor(imGrayRight,imGrayRight,CV_BGR2GRAY);
		}
		else if(imGrayRight.channels()==4)
		{
			if(mbRGB)
				cvtColor(imGrayRight,imGrayRight,CV_RGBA2GRAY);
			else
				cvtColor(imGrayRight,imGrayRight,CV_BGRA2GRAY);
		}

#if RTABMAP_ORB_SLAM == 3
		mCurrentFrame = Frame(mImGray,imGrayRight,timestamp,mpORBextractorLeft,mpORBextractorRight,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth, mpCamera);
#else
		mCurrentFrame = Frame(mImGray,imGrayRight,timestamp,mpORBextractorLeft,mpORBextractorRight,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth);
#endif

		Track();

		return mCurrentFrame.mTcw.clone();
	}
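
	// RGB-D entry point used by the odometry wrapper; the depth image must already be
	// in meters (CV_32F), enforced by the UASSERT below.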
	cv::Mat GrabImageRGBD(
			const cv::Mat &imRGB,
			const cv::Mat &imD,
			const double &timestamp)
	{
		mImGray = imRGB;
		cv::Mat imDepth = imD;

		if(mImGray.channels()==3)
		{
			if(mbRGB)
				cvtColor(mImGray,mImGray,CV_RGB2GRAY);
			else
				cvtColor(mImGray,mImGray,CV_BGR2GRAY);
		}
		else if(mImGray.channels()==4)
		{
			if(mbRGB)
				cvtColor(mImGray,mImGray,CV_RGBA2GRAY);
			else
				cvtColor(mImGray,mImGray,CV_BGRA2GRAY);
		}

		UASSERT(imDepth.type()==CV_32F);

#if RTABMAP_ORB_SLAM == 3
		mCurrentFrame = Frame(mImGray,imDepth,timestamp,mpORBextractorLeft,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth, mpCamera);
#else
		mCurrentFrame = Frame(mImGray,imDepth,timestamp,mpORBextractorLeft,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth);
#endif

		Track();

		return mCurrentFrame.mTcw.clone();
	}
};
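
// Loop closure detection is not wanted for pure odometry: this subclass keeps the
// LoopClosing state machine but runs a thread that only drains the keyframe queue.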
class LoopCloser: public LoopClosing
{
public:
#if RTABMAP_ORB_SLAM == 3
	LoopCloser(Atlas* pMap, KeyFrameDatabase* pDB, ORBVocabulary* pVoc, const bool bFixScale) :
#else
	LoopCloser(Map* pMap, KeyFrameDatabase* pDB, ORBVocabulary* pVoc, const bool bFixScale) :
#endif
		LoopClosing(pMap, pDB, pVoc, bFixScale)
	{}

public:
	void RunNoLoop()
	{
		mbFinished = false;
		while(1)
		{
			// just clear the keyframe queue, no loop detection is performed
			{
				unique_lock<mutex> lock(mMutexLoopQueue);
				mlpLoopKeyFrameQueue.clear();
			}

			ResetIfRequested();

			if(CheckFinish())
				break;

			usleep(1000000);
		}
		SetFinish();
	}
};
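
// Lightweight replacement for ORB_SLAM's System class: owns the vocabulary, keyframe
// database, map, tracker, local mapper and the no-op loop closer used by the odometry below.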
#if RTABMAP_ORB_SLAM == 3
using namespace ORB_SLAM3;
#else
using namespace ORB_SLAM2;
#endif

class ORBSLAMSystem
{
public:
	ORBSLAMSystem(const rtabmap::ParametersMap & parameters) :
		mpVocabulary(0),
		mpKeyFrameDatabase(0),
		mpMap(0),
		mpTracker(0),
		mpLocalMapper(0),
		mpLoopCloser(0),
		mptLocalMapping(0),
		mptLoopClosing(0),
		parameters_(parameters)
	{
		std::string vocabularyPath;
		rtabmap::Parameters::parse(parameters, rtabmap::Parameters::kOdomORBSLAMVocPath(), vocabularyPath);

		if(!vocabularyPath.empty())
		{
			// Load ORB Vocabulary
			UWARN("Loading ORB Vocabulary: \"%s\". This could take a while...", vocabularyPath.c_str());
			mpVocabulary = new ORBVocabulary();
			bool bVocLoad = mpVocabulary->loadFromTextFile(vocabularyPath);
			if(!bVocLoad)
			{
				UERROR("Failed to open vocabulary at %s", vocabularyPath.c_str());
				delete mpVocabulary;
				mpVocabulary = 0;
			}
			else
			{
				UWARN("Vocabulary loaded!");
			}
		}
		else
		{
			UERROR("ORBSLAM2 vocabulary path should be set! (Parameter name=\"%s\")", rtabmap::Parameters::kOdomORBSLAMVocPath().c_str());
		}
	}

	bool init(const rtabmap::CameraModel & model, bool stereo, double baseline, const rtabmap::Transform & localIMUTransform)
	{
		if(!mpVocabulary)
		{
			UERROR("Vocabulary not loaded!");
			return false;
		}
		std::string workingDir;
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kRtabmapWorkingDirectory(), workingDir);
		if(workingDir.empty())
		{
			workingDir = ".";
		}
		std::string configPath = workingDir+"/rtabmap_orbslam.yaml";
		std::ofstream ofs (configPath, std::ofstream::out);
		ofs << "%YAML:1.0" << std::endl;
		ofs << std::endl;

#if RTABMAP_ORB_SLAM == 3
		ofs << "Camera.type: \"PinHole\"" << std::endl;
		ofs << std::endl;
#endif

		// Camera calibration and distortion parameters (OpenCV)
		ofs << fixed << setprecision(13);
		ofs << "Camera.fx: " << model.fx() << std::endl;
		ofs << "Camera.fy: " << model.fy() << std::endl;
		ofs << "Camera.cx: " << model.cx() << std::endl;
		ofs << "Camera.cy: " << model.cy() << std::endl;

		if(model.D().cols < 4)
		{
			ofs << "Camera.k1: " << 0.0 << std::endl;
			ofs << "Camera.k2: " << 0.0 << std::endl;
			ofs << "Camera.p1: " << 0.0 << std::endl;
			ofs << "Camera.p2: " << 0.0 << std::endl;
			ofs << "Camera.k3: " << 0.0 << std::endl;
		}
		if(model.D().cols >= 4)
		{
			ofs << "Camera.k1: " << model.D().at<double>(0,0) << std::endl;
			ofs << "Camera.k2: " << model.D().at<double>(0,1) << std::endl;
			ofs << "Camera.p1: " << model.D().at<double>(0,2) << std::endl;
			ofs << "Camera.p2: " << model.D().at<double>(0,3) << std::endl;
		}
		if(model.D().cols >= 5)
		{
			ofs << "Camera.k3: " << model.D().at<double>(0,4) << std::endl;
		}
		if(model.D().cols > 5)
		{
			UWARN("Unhandled camera distortion size %d, only the first 5 coefficients are used", model.D().cols);
		}
		ofs << "Camera.width: " << model.imageWidth() << std::endl;
		ofs << "Camera.height: " << model.imageHeight() << std::endl;
		ofs << std::endl;

		// Stereo baseline times fx
		if(baseline <= 0.0)
		{
			baseline = rtabmap::Parameters::defaultOdomORBSLAMBf();
			rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kOdomORBSLAMBf(), baseline);
		}
		ofs << "Camera.bf: " << model.fx()*baseline << std::endl;

		// Color order of the images (0: BGR, 1: RGB; ignored if images are grayscale)
		ofs << "Camera.RGB: 1" << std::endl;

		// Camera frame rate
		float fps = rtabmap::Parameters::defaultOdomORBSLAMFps();
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kOdomORBSLAMFps(), fps);
		ofs << "Camera.fps: " << fps << std::endl;

		// Close/far threshold (in baseline multiples)
		double thDepth = rtabmap::Parameters::defaultOdomORBSLAMThDepth();
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kOdomORBSLAMThDepth(), thDepth);
		ofs << "ThDepth: " << thDepth << std::endl;

		// Depth map values factor (pixel value corresponding to 1 m)
		ofs << "DepthMapFactor: " << 1000.0 << std::endl;
		ofs << std::endl;
		if(!localIMUTransform.isNull())
		{
			// camImuT: camera/IMU extrinsics derived from localIMUTransform
			// ...
			ofs << "Tbc: !!opencv-matrix" << std::endl;
			ofs << " rows: 4" << std::endl;
			ofs << " cols: 4" << std::endl;
			ofs << " dt: f" << std::endl;
			ofs << " data: [" << camImuT.data()[0] << ", " << camImuT.data()[1] << ", " << camImuT.data()[2] << ", " << camImuT.data()[3] << ", " << std::endl;
			ofs << " " << camImuT.data()[4] << ", " << camImuT.data()[5] << ", " << camImuT.data()[6] << ", " << camImuT.data()[7] << ", " << std::endl;
			ofs << " " << camImuT.data()[8] << ", " << camImuT.data()[9] << ", " << camImuT.data()[10] << ", " << camImuT.data()[11] << ", " << std::endl;
			ofs << " 0.0, 0.0, 0.0, 1.0]" << std::endl;

			ofs << "IMU.NoiseGyro: " << 1.7e-4 << std::endl;
			ofs << "IMU.NoiseAcc: " << 2.0e-3 << std::endl;
			ofs << "IMU.GyroWalk: " << 1.9393e-5 << std::endl;
			ofs << "IMU.AccWalk: " << 3.e-3 << std::endl;
			ofs << "IMU.Frequency: " << 200 << std::endl;
			ofs << std::endl;
		}
		// Number of ORB features per image
		int features = rtabmap::Parameters::defaultOdomORBSLAMMaxFeatures();
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kOdomORBSLAMMaxFeatures(), features);
		ofs << "ORBextractor.nFeatures: " << features << std::endl;

		// Scale factor between levels in the scale pyramid
		double scaleFactor = rtabmap::Parameters::defaultORBScaleFactor();
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kORBScaleFactor(), scaleFactor);
		ofs << "ORBextractor.scaleFactor: " << scaleFactor << std::endl;

		// Number of levels in the scale pyramid
		int levels = rtabmap::Parameters::defaultORBNLevels();
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kORBNLevels(), levels);
		ofs << "ORBextractor.nLevels: " << levels << std::endl;

		// FAST thresholds (initial, and minimum when too few corners are found)
		int iniThFAST = rtabmap::Parameters::defaultFASTThreshold();
		int minThFAST = rtabmap::Parameters::defaultFASTMinThreshold();
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kFASTThreshold(), iniThFAST);
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kFASTMinThreshold(), minThFAST);
		ofs << "ORBextractor.iniThFAST: " << iniThFAST << std::endl;
		ofs << "ORBextractor.minThFAST: " << minThFAST << std::endl;

		int maxFeatureMapSize = rtabmap::Parameters::defaultOdomORBSLAMMapSize();
		rtabmap::Parameters::parse(parameters_, rtabmap::Parameters::kOdomORBSLAMMapSize(), maxFeatureMapSize);

		ofs.close();
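
		// Build the SLAM components directly (no ORB_SLAM System nor Viewer): tracker,
		// local mapper, and a loop closer whose loop detection thread never runs.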
		// Create the KeyFrame database
		mpKeyFrameDatabase = new KeyFrameDatabase(*mpVocabulary);

		// Create the map
#if RTABMAP_ORB_SLAM == 3
		mpMap = new Atlas(0);
#else
		mpMap = new ORB_SLAM2::Map();
#endif

		// Initialize the Tracking "thread" (it lives in the caller's thread)
		mpTracker = new Tracker(0, mpVocabulary, 0, 0, mpMap, mpKeyFrameDatabase, configPath, stereo?System::STEREO:System::RGBD, maxFeatureMapSize);

		// Initialize and launch the Local Mapping thread
#if RTABMAP_ORB_SLAM == 3
		mpLocalMapper = new LocalMapping(0, mpMap, false, stereo && !localIMUTransform.isNull());
#else
		mpLocalMapper = new LocalMapping(mpMap, false);
#endif

		// Initialize and launch the Loop Closing thread (loop detection disabled)
		mpLoopCloser = new LoopCloser(mpMap, mpKeyFrameDatabase, mpVocabulary, true);

		mptLocalMapping = new thread(&LocalMapping::Run, mpLocalMapper);
		mptLoopClosing = new thread(&LoopCloser::RunNoLoop, mpLoopCloser);

		// Set pointers between threads
		mpTracker->SetLocalMapper(mpLocalMapper);
		mpTracker->SetLoopClosing(mpLoopCloser);
		mpTracker->SetViewer(0);

		mpLocalMapper->SetTracker(mpTracker);
		mpLocalMapper->SetLoopCloser(mpLoopCloser);

		mpLoopCloser->SetTracker(mpTracker);
		mpLoopCloser->SetLocalMapper(mpLocalMapper);

		// Reset static state in case a previous session was run in this process
		Frame::mbInitialComputations = true;

#if RTABMAP_ORB_SLAM == 3
		Verbose::SetTh(Verbose::VERBOSITY_QUIET);
		mpTracker->Reset(true);
#endif

		return true;
	}
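
	// Destruction goes through shutdown(), which follows ORB_SLAM's System::Shutdown():
	// request both threads to finish, wait for them (and any running global bundle
	// adjustment), then free all components.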
	virtual ~ORBSLAMSystem()
	{
		shutdown();
		delete mpVocabulary;
	}

	void shutdown()
	{
		if(mpMap)
		{
			mpLocalMapper->RequestFinish();
			mpLoopCloser->RequestFinish();

			// Wait until all threads have effectively stopped
			while(!mpLocalMapper->isFinished() || !mpLoopCloser->isFinished() || mpLoopCloser->isRunningGBA())
			{
				usleep(5000);
			}

			mptLoopClosing->join();
			delete mptLoopClosing;
			mptLoopClosing = 0;
			mptLocalMapping->join();
			delete mptLocalMapping;
			mptLocalMapping = 0;

			delete mpLoopCloser;
			mpLoopCloser = 0;
			delete mpLocalMapper;
			mpLocalMapper = 0;
			delete mpTracker;
			mpTracker = 0;
			delete mpMap;
			mpMap = 0;

			delete mpKeyFrameDatabase;
			mpKeyFrameDatabase = 0;
		}
	}

public:
	// ORB vocabulary used for feature matching and place recognition
	ORBVocabulary* mpVocabulary;

	// KeyFrame database for place recognition (relocalization)
	KeyFrameDatabase* mpKeyFrameDatabase;

	// Map structure that stores the pointers to all KeyFrames and MapPoints
#if RTABMAP_ORB_SLAM == 3
	Atlas* mpMap;
#else
	Map* mpMap;
#endif

	Tracking* mpTracker;
	LocalMapping* mpLocalMapper;
	LoopCloser* mpLoopCloser;

	// System threads: Local Mapping and (no-op) Loop Closing
	std::thread* mptLocalMapping;
	std::thread* mptLoopClosing;

	rtabmap::ParametersMap parameters_;
};

#endif // RTABMAP_ORB_SLAM
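
// rtabmap odometry plugin wrapping ORBSLAMSystem; everything ORB_SLAM-specific stays
// behind the RTABMAP_ORB_SLAM define so the class still builds (and reports an error
// at runtime) when ORB_SLAM support is not compiled in.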
namespace rtabmap {

OdometryORBSLAM::OdometryORBSLAM(const ParametersMap & parameters) :
	Odometry(parameters)
#ifdef RTABMAP_ORB_SLAM
	,
	orbslam_(0),
	firstFrame_(true)
#endif
{
#ifdef RTABMAP_ORB_SLAM
	orbslam_ = new ORBSLAMSystem(parameters);
#endif
}

OdometryORBSLAM::~OdometryORBSLAM()
{
#ifdef RTABMAP_ORB_SLAM
	delete orbslam_;
#endif
}

void OdometryORBSLAM::reset(const Transform & initialPose)
{
	Odometry::reset(initialPose);
#ifdef RTABMAP_ORB_SLAM
	if(orbslam_)
	{
		orbslam_->shutdown();
	}
	firstFrame_ = true;
	originLocalTransform_.setNull();
	previousPose_.setIdentity();
	imuLocalTransform_.setNull();
#endif
}
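
// canProcessAsyncIMU() tells the odometry framework whether IMU samples can be fed
// asynchronously (only when ORB_SLAM3 inertial mode is enabled). computeTransform()
// then feeds IMU samples and images to the tracker and converts ORB_SLAM's camera
// pose (Tcw, optical frame) into an incremental base-frame transform for rtabmap,
// with a rough covariance derived from the stereo baseline.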
bool OdometryORBSLAM::canProcessAsyncIMU() const
{
#ifdef RTABMAP_ORB_SLAM
	return useIMU_;
#else
	return false;
#endif
}

// return a non-null transform if the odometry is correctly computed
Transform OdometryORBSLAM::computeTransform(
		SensorData & data,
		const Transform & guess,
		OdometryInfo * info)
{
	Transform t;

#ifdef RTABMAP_ORB_SLAM
	UTimer timer;

#if RTABMAP_ORB_SLAM == 3
	if(useIMU_)
	{
		if(orbslam_->mpTracker == 0)
		{
			if(!data.imu().empty())
			{
				// Keep the IMU calibration; the tracker is initialized on the first image
				imuLocalTransform_ = data.imu().localTransform();
			}
		}
		else if(!data.imu().empty())
		{
			// Forward the IMU sample to ORB_SLAM3
			ORB_SLAM3::IMU::Point pt(
					data.imu().linearAcceleration().val[0],
					data.imu().linearAcceleration().val[1],
					data.imu().linearAcceleration().val[2],
					data.imu().angularVelocity().val[0],
					data.imu().angularVelocity().val[1],
					data.imu().angularVelocity().val[2],
					data.stamp());
			orbslam_->mpTracker->GrabImuData(pt);
		}

		if(data.imageRaw().empty() || imuLocalTransform_.isNull())
		{
			// Wait until both an image and the IMU calibration are received
			return t;
		}
	}
#endif

	if(data.imageRaw().empty() ||
	   data.imageRaw().rows != data.depthOrRightRaw().rows ||
	   data.imageRaw().cols != data.depthOrRightRaw().cols)
	{
		UERROR("Not supported input! RGB (%dx%d) and depth (%dx%d) should have the same size.",
				data.imageRaw().cols, data.imageRaw().rows,
				data.depthOrRightRaw().cols, data.depthOrRightRaw().rows);
		return t;
	}

	bool stereo = data.cameraModels().size() == 0;
	CameraModel model = stereo?data.stereoCameraModels()[0].left():data.cameraModels()[0];
	if(!model.isValidForProjection())
	{
		UERROR("Invalid camera model!");
		return t;
	}

	if(!stereo && useIMU_)
	{
		UWARN("Disabling IMU support (ORB_SLAM3 doesn't support IMU with RGB-D mode).");
		useIMU_ = false;
		imuLocalTransform_.setNull();
	}

	cv::Mat covariance;
	if(orbslam_->mpTracker == 0)
	{
		// Lazy initialization on the first frame, once the camera model is known
		if(!orbslam_->init(model, stereo, data.stereoCameraModels()[0].baseline(), imuLocalTransform_))
		{
			return t;
		}
	}

	cv::Mat Tcw;
	Transform localTransform;
	if(stereo)
	{
		localTransform = data.stereoCameraModels()[0].localTransform();
		Tcw = ((Tracker*)orbslam_->mpTracker)->GrabImageStereo(data.imageRaw(), data.rightRaw(), data.stamp());
	}
	else
	{
		localTransform = data.cameraModels()[0].localTransform();
		cv::Mat depth;
		if(data.depthRaw().type() == CV_32FC1)
		{
			depth = data.depthRaw();
		}
		else if(data.depthRaw().type() == CV_16UC1)
		{
			depth = util2d::cvtDepthToFloat(data.depthRaw());
		}
		Tcw = ((Tracker*)orbslam_->mpTracker)->GrabImageRGBD(data.imageRaw(), depth, data.stamp());
	}

	Transform previousPoseInv = previousPose_.inverse();
	if(orbslam_->mpTracker->mState == Tracking::LOST)
	{
		covariance = cv::Mat::eye(6,6,CV_64FC1)*9999.0f;
	}
	else if(Tcw.cols == 4 && Tcw.rows == 4)
	{
		// Tcw is the pose of the world in the camera (optical) frame
		Transform p(Tcw.at<float>(0,0), Tcw.at<float>(0,1), Tcw.at<float>(0,2), Tcw.at<float>(0,3),
					Tcw.at<float>(1,0), Tcw.at<float>(1,1), Tcw.at<float>(1,2), Tcw.at<float>(1,3),
					Tcw.at<float>(2,0), Tcw.at<float>(2,1), Tcw.at<float>(2,2), Tcw.at<float>(2,3));

		if(!p.isNull())
		{
			// Convert to a base-frame pose using the camera local transform
			if(!localTransform.isNull())
			{
				if(originLocalTransform_.isNull())
				{
					originLocalTransform_ = localTransform;
				}
				p = originLocalTransform_ * p.inverse() * localTransform.inverse();
			}
			t = previousPoseInv*p;
			previousPose_ = p;
		}

		if(firstFrame_)
		{
			// The covariance is not meaningful on the first frame
			covariance = cv::Mat::eye(6,6,CV_64FC1)*9999.0f;
			firstFrame_ = false;
		}
		else
		{
			// Rough covariance estimate based on the stereo baseline
			float baseline = stereo?data.stereoCameraModels()[0].baseline():0.0f;
			if(baseline <= 0.0f)
			{
				baseline = rtabmap::Parameters::defaultOdomORBSLAMBf();
			}
			double linearVar = 0.0001;
			if(baseline > 0.0f)
			{
				linearVar = baseline/8.0;
				linearVar *= linearVar;
			}

			covariance = cv::Mat::eye(6,6, CV_64FC1);
			covariance.at<double>(0,0) = linearVar;
			covariance.at<double>(1,1) = linearVar;
			covariance.at<double>(2,2) = linearVar;
			covariance.at<double>(3,3) = 0.0001;
			covariance.at<double>(4,4) = 0.0001;
			covariance.at<double>(5,5) = 0.0001;
		}
	}

	int totalMapPoints = 0;
	int totalKfs = 0;
	if(orbslam_->mpMap)
	{
		totalMapPoints = orbslam_->mpMap->MapPointsInMap();
		totalKfs = orbslam_->mpMap->KeyFramesInMap();
	}

	if(info)
	{
		info->reg.covariance = covariance;
		// ...

		if(this->isInfoDataFilled())
		{
			// Export the tracked keypoints as "words", keyed by map point id
			// (or a negative temporary id when the keypoint has no map point)
			const std::vector<cv::KeyPoint> & kpts = orbslam_->mpTracker->mCurrentFrame.mvKeys;
			for(unsigned int i = 0; i < kpts.size(); ++i)
			{
				int wordId;
				if(orbslam_->mpTracker->mCurrentFrame.mvpMapPoints[i] != 0)
				{
					wordId = orbslam_->mpTracker->mCurrentFrame.mvpMapPoints[i]->mnId;
				}
				else
				{
					wordId = -(i+1);
				}
				info->words.insert(std::make_pair(wordId, kpts[i]));
				if(orbslam_->mpTracker->mCurrentFrame.mvpMapPoints[i] != 0)
				{
					// matched keypoints are also reported as matches/inliers
					// ...
				}
			}

			// Export the local feature map in the odometry frame
			std::vector<MapPoint*> mapPoints = orbslam_->mpMap->GetAllMapPoints();
			Eigen::Affine3f fixRot = (this->getPose()*previousPoseInv*originLocalTransform_).toEigen3f();
			for(unsigned int i = 0; i < mapPoints.size(); ++i)
			{
				cv::Point3f pt(mapPoints[i]->GetWorldPos());
				pcl::PointXYZ ptt = pcl::transformPoint(pcl::PointXYZ(pt.x, pt.y, pt.z), fixRot);
				info->localMap.insert(std::make_pair(mapPoints[i]->mnId, cv::Point3f(ptt.x, ptt.y, ptt.z)));
			}
		}
	}

	UINFO("Odom update time = %fs, map points=%d, keyframes=%d, lost=%s", timer.elapsed(), totalMapPoints, totalKfs, t.isNull()?"true":"false");

#else
	UERROR("RTAB-Map is not built with ORB_SLAM support! Select another visual odometry approach.");
#endif

	return t;
}

} // namespace rtabmap