stereo_camera_model.cpp
#include "image_geometry/stereo_camera_model.h"

namespace image_geometry {

StereoCameraModel::StereoCameraModel()
  : Q_(0.0)
{
  // Q_ starts out all zeros; seed the top-left diagonal. updateQ() overwrites
  // these entries once calibration is available.
  Q_(0,0) = Q_(1,1) = 1.0;
}

StereoCameraModel::StereoCameraModel(const StereoCameraModel& other)
  : left_(other.left_), right_(other.right_),
    Q_(0.0)
{
  Q_(0,0) = Q_(1,1) = 1.0;
  if (other.initialized())
    updateQ();
}

StereoCameraModel& StereoCameraModel::operator=(const StereoCameraModel& other)
{
  if (other.initialized())
    this->fromCameraInfo(other.left_.cameraInfo(), other.right_.cameraInfo());
  return *this;
}

bool StereoCameraModel::fromCameraInfo(const sensor_msgs::CameraInfo& left,
                                       const sensor_msgs::CameraInfo& right)
{
  bool changed_left  = left_.fromCameraInfo(left);
  bool changed_right = right_.fromCameraInfo(right);
  bool changed = changed_left || changed_right;

  // Note: don't require identical time stamps to allow imperfectly synced stereo.
  assert( left_.tfFrame() == right_.tfFrame() );
  assert( left_.fx() == right_.fx() );
  assert( left_.fy() == right_.fy() );
  assert( left_.cy() == right_.cy() );
  // cx may differ for verged cameras

  if (changed)
    updateQ();

  return changed;
}

bool StereoCameraModel::fromCameraInfo(const sensor_msgs::CameraInfoConstPtr& left,
                                       const sensor_msgs::CameraInfoConstPtr& right)
{
  return fromCameraInfo(*left, *right);
}
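
// Usage sketch, assuming a node that already receives a synchronized pair of
// CameraInfo messages (the callback name and wiring below are illustrative,
// not part of this library):
//
//   #include <image_geometry/stereo_camera_model.h>
//   #include <cstdio>
//
//   image_geometry::StereoCameraModel model;
//
//   void infoCallback(const sensor_msgs::CameraInfoConstPtr& left_info,
//                     const sensor_msgs::CameraInfoConstPtr& right_info)
//   {
//     // fromCameraInfo() returns true only when the calibration, and
//     // therefore Q, actually changed.
//     if (model.fromCameraInfo(left_info, right_info))
//       std::printf("baseline is now %.3f m\n", model.baseline());
//   }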

void StereoCameraModel::updateQ()
{
  // Update variable fields of reprojection matrix
  /*
    From Springer Handbook of Robotics, p. 524:

         [ Fx  0   Cx   0    ]
    P  = [ 0   Fy  Cy   0    ]
         [ 0   0   1    0    ]

         [ Fx  0   Cx'  FxTx ]
    P' = [ 0   Fy  Cy   0    ]
         [ 0   0   1    0    ]

    where unprimed parameters are from the left projection matrix and primed
    parameters from the right.

    Up to a common homogeneous scale factor w = z,

    [u   v 1]^T = P  * [x y z 1]^T
    [u-d v 1]^T = P' * [x y z 1]^T

    Stacking the two equations above gives

    [u v u-d 1]^T = [ Fx   0    Cx   0    ] * [x y z 1]^T
                    [ 0    Fy   Cy   0    ]
                    [ Fx   0    Cx'  FxTx ]
                    [ 0    0    1    0    ]

    Subtracting the 3rd row from the 1st and inverting the expression results
    in the following equation:

    [x y z 1]^T = Q * [u v d 1]^T

    where the disparity is d = x_left - x_right and Q is defined as

    Q = [ FyTx  0     0   -FyCxTx     ]
        [ 0     FxTx  0   -FxCyTx     ]
        [ 0     0     0    FxFyTx     ]
        [ 0     0    -Fy   Fy(Cx-Cx') ]

    Using the assumption Fx = Fy, Q can be simplified to

        [ 1  0   0     -Cx         ]
    Q = [ 0  1   0     -Cy         ]
        [ 0  0   0      Fx         ]
        [ 0  0  -1/Tx  (Cx-Cx')/Tx ]

    but for compatibility with stereo cameras with different focal lengths we
    use the full Q matrix.
   */
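
  /*
    Sanity check: expanding [X Y Z W]^T = Q * [u v d 1]^T with the full Q above
    gives

      X = FyTx(u - Cx)
      Y = FxTx(v - Cy)
      Z = FxFyTx
      W = Fy((Cx - Cx') - d)

    so z = Z/W = FxTx / (Cx - Cx' - d). With Tx = -B (B the baseline computed
    below) and Cx == Cx', this reduces to the familiar z = FxB / d.
  */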
  double Tx = -baseline(); // baseline() returns -Tx, the (positive) baseline; undo that negation here
  Q_(0,0) =  left_.fy() * Tx;
  Q_(0,3) = -left_.fy() * left_.cx() * Tx;
  Q_(1,1) =  left_.fx() * Tx;
  Q_(1,3) = -left_.fx() * left_.cy() * Tx;
  Q_(2,3) =  left_.fx() * left_.fy() * Tx;
  Q_(3,2) = -left_.fy();
  Q_(3,3) =  left_.fy() * (left_.cx() - right_.cx()); // zero when disparities are pre-adjusted
}

void StereoCameraModel::projectDisparityTo3d(const cv::Point2d& left_uv_rect, float disparity,
                                             cv::Point3d& xyz) const
{
  assert( initialized() );

  // Do the math inline:
  // [X Y Z W]^T = Q * [u v d 1]^T
  // Point = (X/W, Y/W, Z/W)
  // cv::perspectiveTransform could be used but with more overhead.
  double u = left_uv_rect.x, v = left_uv_rect.y;
  cv::Point3d XYZ( (Q_(0,0) * u) + Q_(0,3), (Q_(1,1) * v) + Q_(1,3), Q_(2,3));
  double W = Q_(3,2)*disparity + Q_(3,3);
  xyz = XYZ * (1.0/W);
}
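
// Usage sketch (the pixel coordinates and disparity below are made-up values):
// given a rectified left-image pixel and its disparity in pixels, recover the
// corresponding 3D point in the left rectified camera frame.
//
//   cv::Point2d uv_rect(320.0, 240.0);  // rectified pixel in the left image
//   float disparity = 32.5f;            // e.g. from a stereo matcher
//   cv::Point3d xyz;
//   model.projectDisparityTo3d(uv_rect, disparity, xyz);
//   // xyz.z is the depth along the optical axis; larger disparity -> smaller z.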

const double StereoCameraModel::MISSING_Z = 10000.;

void StereoCameraModel::projectDisparityImageTo3d(const cv::Mat& disparity, cv::Mat& point_cloud,
                                                  bool handleMissingValues) const
{
  assert( initialized() );

  cv::reprojectImageTo3D(disparity, point_cloud, Q_, handleMissingValues);
}
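
// Usage sketch (the disparity image here is only a placeholder): reproject a
// dense single-channel disparity image into a 3-channel image of (X,Y,Z)
// points.
//
//   cv::Mat disparity32f = ...;  // CV_32FC1 disparity, in pixels
//   cv::Mat points3d;            // filled with one (X,Y,Z) triple per pixel
//   model.projectDisparityImageTo3d(disparity32f, points3d, true);
//   // With handleMissingValues = true, cv::reprojectImageTo3D pushes invalid
//   // disparities out to a large depth (MISSING_Z = 10000) so callers can
//   // filter them afterwards.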

} //namespace image_geometry