System.h
#ifndef SYSTEM_H
#define SYSTEM_H

#include <string>
#include <thread>
#include <opencv2/core/core.hpp>

#include "Tracking.h"
#include "FrameDrawer.h"
#include "MapDrawer.h"
#include "Map.h"
#include "LocalMapping.h"
#include "LoopClosing.h"
#include "KeyFrameDatabase.h"
#include "ORBVocabulary.h"
#include "Viewer.h"

// for point cloud viewing
#include "pointcloudmapping.h"
#include <pcl/common/transforms.h>
#include <pcl/point_types.h>
#include <pcl/filters/voxel_grid.h>
#include <condition_variable>
#include <pcl/io/pcd_io.h>
#include "DataCache/DataCache.h"

using namespace NS_DataCache;

class PointCloudMapping;

namespace ORB_SLAM2
{

class Viewer;
class FrameDrawer;
class Map;
class Tracking;
class LocalMapping;
class LoopClosing;

class System
{
public:
    // Input sensor
    enum eSensor{
        MONOCULAR=0,
        STEREO=1,
        RGBD=2
    };

    typedef pcl::PointXYZRGBA PointT;
    typedef pcl::PointCloud<PointT> PointCloud;
public:

    DataCache<cv::Mat>* GetTcwDataCache();
    int32_t GetTcwDataCacheIndex();
    shared_ptr<PointCloudMapping> GetPointCloudMapping();

    Map* getMap() { return mpMap; }
    Tracking* getTracker() { return mpTracker; }
    LocalMapping* getLocalMapping() { return mpLocalMapper; }
    LoopClosing* getLoopClosing() { return mpLoopCloser; }

    // Initialize the SLAM system. It launches the Local Mapping, Loop Closing and Viewer threads.
    System(const string &strVocFile, const string &strSettingsFile, const eSensor sensor, const bool bUseViewer = true);

    // Process the given stereo frame. Images must be synchronized and rectified.
    // Input images: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
    // Returns the camera pose (empty if tracking fails).
    cv::Mat TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timestamp);

    // Process the given RGB-D frame. The depth map must be registered to the RGB frame.
    // Input image: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
    // Input depthmap: Float (CV_32F).
    // Returns the camera pose (empty if tracking fails).
    cv::Mat TrackRGBD(const cv::Mat &im, const cv::Mat &depthmap, const double &timestamp);

    // Process the given monocular frame.
    // Input image: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
    // Returns the camera pose (empty if tracking fails).
    cv::Mat TrackMonocular(const cv::Mat &im, const double &timestamp);

    // This stops the Local Mapping thread (map building) and performs only camera tracking.
    void ActivateLocalizationMode();
    // This resumes the Local Mapping thread and performs SLAM again.
    void DeactivateLocalizationMode();

    // Returns true if there has been a big map change (loop closure, global BA)
    // since the last call to this function.
    bool MapChanged();

    // Reset the system (clear the map).
    void Reset();

    // All threads will be requested to finish.
    // It waits until all threads have finished.
    // This function must be called before saving the trajectory.
    void Shutdown();

    // Save camera trajectory in the TUM RGB-D dataset format.
    // Only for stereo and RGB-D. This method does not work for monocular.
    // Call Shutdown() first.
    // See format details at: http://vision.in.tum.de/data/datasets/rgbd-dataset
    void SaveTrajectoryTUM(const string &filename);

    // Save keyframe poses in the TUM RGB-D dataset format.
    // This method works for all sensor inputs.
    // Call Shutdown() first.
    // See format details at: http://vision.in.tum.de/data/datasets/rgbd-dataset
    void SaveKeyFrameTrajectoryTUM(const string &filename);

    // Save camera trajectory in the KITTI dataset format.
    // Only for stereo and RGB-D. This method does not work for monocular.
    // Call Shutdown() first.
    // See format details at: http://www.cvlibs.net/datasets/kitti/eval_odometry.php
    void SaveTrajectoryKITTI(const string &filename);

    void Save2dMapPointsTUM(const string &filename, const int x, const int y);
    void SaveGridMapTUM(const string &filename);

    // TODO: Save/Load functions
    // SaveMap(const string &filename);
    // LoadMap(const string &filename);

    // Information from the most recently processed frame.
    // You can call this right after TrackMonocular (or stereo or RGBD).
    int GetTrackingState();
    std::vector<MapPoint*> GetTrackedMapPoints();
    std::vector<cv::KeyPoint> GetTrackedKeyPointsUn();

private:

    // Buffer (Tcw data cache)
    DataCache<cv::Mat>* mpTcwDataCache;
    int32_t miDataCacheIndex;

    // Input sensor
    eSensor mSensor;

    // ORB vocabulary used for place recognition and feature matching.
    ORBVocabulary* mpVocabulary;

    // KeyFrame database for place recognition (relocalization and loop detection).
    KeyFrameDatabase* mpKeyFrameDatabase;

    // Map structure that stores the pointers to all KeyFrames and MapPoints.
    Map* mpMap;

    // Tracker. It receives a frame and computes the associated camera pose.
    // It also decides when to insert a new keyframe, create some new MapPoints and
    // performs relocalization if tracking fails.
    Tracking* mpTracker;

    // Local Mapper. It manages the local map and performs local bundle adjustment.
    LocalMapping* mpLocalMapper;

    // Loop Closer. It searches loops with every new keyframe. If there is a loop it performs
    // a pose graph optimization and full bundle adjustment (in a new thread) afterwards.
    LoopClosing* mpLoopCloser;

    // The viewer draws the map and the current camera pose. It uses Pangolin.
    Viewer* mpViewer;

    FrameDrawer* mpFrameDrawer;
    MapDrawer* mpMapDrawer;

    // System threads: Local Mapping, Loop Closing, Viewer.
    // The Tracking thread "lives" in the main execution thread that creates the System object.
    std::thread* mptLocalMapping;
    std::thread* mptLoopClosing;
    std::thread* mptViewer;

    // Reset flag
    std::mutex mMutexReset;
    bool mbReset;

    // Change mode flags
    std::mutex mMutexMode;
    bool mbActivateLocalizationMode;
    bool mbDeactivateLocalizationMode;

    // Tracking state
    int mTrackingState;
    std::vector<MapPoint*> mTrackedMapPoints;
    std::vector<cv::KeyPoint> mTrackedKeyPointsUn;
    std::mutex mMutexState;

    // point cloud mapping
    shared_ptr<PointCloudMapping> mpPointCloudMapping;
};

} // namespace ORB_SLAM2

#endif // SYSTEM_H
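
For orientation, the sketch below shows how a driver program might use this interface for an RGB-D sequence: construct a System (which launches the Local Mapping, Loop Closing and Viewer threads), feed registered colour/depth frames to TrackRGBD, then call Shutdown() before saving a trajectory. This is a minimal illustration under assumptions, not part of the package: the file names, timestamps, and the TUM-style depth scale factor are placeholders.

// example_rgbd_driver.cc -- illustrative sketch only; file names, timestamps and
// the depth scale factor below are placeholders, not part of this package.
#include <string>
#include <vector>

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>   // cv::imread

#include "System.h"

int main(int argc, char **argv)
{
    if(argc != 3)
        return 1;   // usage: ./example_rgbd_driver path_to_vocabulary path_to_settings

    // Constructing the System launches the Local Mapping, Loop Closing and Viewer
    // threads; Tracking runs in this (main) thread.
    ORB_SLAM2::System SLAM(argv[1], argv[2], ORB_SLAM2::System::RGBD, true);

    // Placeholder frame list; a real driver would read an association file.
    std::vector<std::string> vRGB   = {"rgb/0.png"};
    std::vector<std::string> vDepth = {"depth/0.png"};
    std::vector<double> vTimestamps = {0.0};

    for(size_t i = 0; i < vRGB.size(); i++)
    {
        cv::Mat imRGB = cv::imread(vRGB[i],   cv::IMREAD_UNCHANGED);
        cv::Mat imD   = cv::imread(vDepth[i], cv::IMREAD_UNCHANGED);

        // TrackRGBD expects a CV_32F depth map registered to the RGB frame.
        if(imD.type() != CV_32F)
            imD.convertTo(imD, CV_32F, 1.0 / 5000.0);   // assumed TUM depth factor

        // Returns the camera pose Tcw, or an empty matrix if tracking fails.
        cv::Mat Tcw = SLAM.TrackRGBD(imRGB, imD, vTimestamps[i]);
    }

    // Stop all threads before saving; SaveKeyFrameTrajectoryTUM works for all sensors.
    SLAM.Shutdown();
    SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt");

    return 0;
}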