#ifndef CAMERAMOBILE_H_
#define CAMERAMOBILE_H_

#include <boost/thread/mutex.hpp>

virtual std::string getClassName() const {return "CameraInfoEvent";}
const std::string & key() const {return key_;}

static LaserScan scanFromPointCloudData(
		const cv::Mat & pointCloudData,
		int points,
		const Transform & pose,
		const CameraModel & model,
		const cv::Mat & rgb,
		std::vector<cv::KeyPoint> * kpts = 0,
		std::vector<cv::Point3f> * kpts3D = 0,
		int kptsSize = 3);

virtual bool init(const std::string & calibrationFolder = ".", const std::string & cameraName = "");
virtual std::string getSerial() const {return "CameraMobile";}

virtual bool getPose(double epochStamp, Transform & pose, cv::Mat & covariance, double maxWaitTime = 0.06);
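The init()/getPose() pair above is the core of the capture interface: init() loads the calibration for a named camera, and getPose() queries the internally buffered device poses for a given epoch timestamp. The following is a minimal usage sketch, not a confirmed example from the library: it assumes "camera" points to a concrete platform implementation derived from rtabmap::CameraMobile, and the include paths are illustrative.

#include "CameraMobile.h"          // header documented on this page
#include <opencv2/core/core.hpp>

// Sketch only: "camera" is assumed to be a concrete platform wrapper
// (e.g. an AR framework camera) derived from rtabmap::CameraMobile.
void queryPoseAt(rtabmap::CameraMobile * camera, double epochStamp)
{
	if(!camera->init(".", ""))   // calibrationFolder, cameraName (defaults shown above)
	{
		return; // calibration could not be loaded
	}

	rtabmap::Transform pose;
	cv::Mat covariance;
	// Look up the buffered pose closest to epochStamp, waiting at most
	// 0.06 s (the default maxWaitTime declared above) for it to arrive.
	if(camera->getPose(epochStamp, pose, covariance, 0.06))
	{
		// pose and covariance are valid for the requested stamp
	}
}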
static LaserScan scanFromPointCloudData(const cv::Mat &pointCloudData, int points, const Transform &pose, const CameraModel &model, const cv::Mat &rgb, std::vector< cv::KeyPoint > *kpts=0, std::vector< cv::Point3f > *kpts3D=0, int kptsSize=3)
const std::string & key() const
virtual SensorData captureImage(SensorCaptureInfo *info=0)
virtual bool isCalibrated() const
ScreenRotation colorCameraToDisplayRotation_
const Transform & pose() const
glm::mat4 projectionMatrix_
void setGPS(const GPS &gps)
const Transform & getOriginOffset() const
CameraModel occlusionModel_
virtual std::string getSerial() const
bool uvsInitialized() const
float transformed_uvs_[8]
virtual std::string getClassName() const
void addEnvSensor(int type, float value)
void setSmoothing(bool enabled)
Transform deviceTColorCamera_
ScreenRotation getScreenRotation() const
EnvSensors lastEnvSensors_
void update(const SensorData &data, const Transform &pose, const glm::mat4 &viewMatrix, const glm::mat4 &projectionMatrix, const float *texCoord)
virtual bool getPose(double epochStamp, Transform &pose, cv::Mat &covariance, double maxWaitTime=0.06)
virtual std::string getClassName() const
virtual bool init(const std::string &calibrationFolder=".", const std::string &cameraName="")
static const rtabmap::Transform opticalRotation
void poseReceived(const Transform &pose, double deviceStamp)
CameraMobile(bool smoothing=false)
virtual void setScreenRotationAndSize(ScreenRotation colorCameraToDisplayRotation, int width, int height)
void getVPMatrices(glm::mat4 &view, glm::mat4 &projection) const
virtual SensorData updateDataOnRender(Transform &pose)
PoseEvent(const Transform &pose)
std::map< EnvSensor::Type, EnvSensor > EnvSensors
double getStampEpochOffset() const
const cv::Mat & getOcclusionImage(CameraModel *model=0) const
static const float bilateralFilteringSigmaS
const float * uvsTransformed() const
static const float bilateralFilteringSigmaR
const Transform & getDeviceTColorCamera() const
static const rtabmap::Transform opticalRotationInv
void setOcclusionImage(const cv::Mat &image, const CameraModel &model)
const std::string & value() const
CameraInfoEvent(int type, const std::string &key, const std::string &value)
virtual bool odomProvided() const
const CameraModel & getCameraModel() const
std::map< double, Transform > poseBuffer_
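Taken together, the members above describe a per-frame update cycle: updateDataOnRender() returns the latest SensorData along with the device pose, and scanFromPointCloudData() converts an AR point cloud into a LaserScan attached to that frame. The sketch below illustrates that flow under stated assumptions: the point-cloud matrix and point count are hypothetical placeholders filled by the platform layer, and the SensorData accessors used (imageRaw(), setLaserScan()) come from the wider rtabmap API rather than from this header.

// Sketch of a per-frame update, assuming a concrete CameraMobile subclass.
void onRenderFrame(rtabmap::CameraMobile * camera)
{
	rtabmap::Transform pose;
	rtabmap::SensorData data = camera->updateDataOnRender(pose);

	if(!pose.isNull() && !data.imageRaw().empty())
	{
		cv::Mat pointCloud;  // hypothetical: filled by the AR SDK
		int nbPoints = 0;    // hypothetical: number of valid points

		// Convert the AR point cloud into a LaserScan using the static
		// helper declared in this header, then attach it to the frame.
		rtabmap::LaserScan scan = rtabmap::CameraMobile::scanFromPointCloudData(
				pointCloud,
				nbPoints,
				pose,
				camera->getCameraModel(),
				data.imageRaw());
		data.setLaserScan(scan);
	}
}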
rtabmap
Author(s): Mathieu Labbe
autogenerated on Thu Jul 25 2024 02:50:07