#include <pcl/common/common.h>
// (other includes elided in the extracted listing)

Transform estimateMotion3DTo2D(
        const std::map<int, cv::Point3f> & words3A,
        const std::map<int, cv::KeyPoint> & words2B,
        const CameraModel & cameraModel,
        int minInliers,
        int iterations,
        double reprojError,
        int flagsPnP,
        int pnpRefineIterations,
        const Transform & guess,
        const std::map<int, cv::Point3f> & words3B,
        cv::Mat * covariance,
        std::vector<int> * matchesOut,
        std::vector<int> * inliersOut)
{
    UASSERT(cameraModel.isValidForProjection());
    Transform transform;
    std::vector<int> matches, inliers;

    if(covariance)
    {
        *covariance = cv::Mat::eye(6,6,CV_64FC1);
    }

    // Collect the 3D->2D correspondences: word ids seen in 3D in frame A and in 2D in frame B
    std::vector<int> ids = uKeys(words2B);
    std::vector<cv::Point3f> objectPoints(ids.size());
    std::vector<cv::Point2f> imagePoints(ids.size());
    int oi = 0;
    matches.resize(ids.size());
    for(unsigned int i=0; i<ids.size(); ++i)
    {
        std::map<int, cv::Point3f>::const_iterator iter = words3A.find(ids[i]);
        if(iter != words3A.end() && isFinite(iter->second))
        {
            const cv::Point3f & pt = iter->second;
            objectPoints[oi].x = pt.x;
            objectPoints[oi].y = pt.y;
            objectPoints[oi].z = pt.z;
            imagePoints[oi] = words2B.find(ids[i])->second.pt;
            matches[oi++] = ids[i];
        }
    }
    objectPoints.resize(oi);
    imagePoints.resize(oi);
    matches.resize(oi);

    UDEBUG("words3A=%d words2B=%d matches=%d words3B=%d guess=%s reprojError=%f iterations=%d",
            (int)words3A.size(), (int)words2B.size(), (int)matches.size(), (int)words3B.size(),
            guess.prettyPrint().c_str(), reprojError, iterations);

    if((int)matches.size() >= minInliers)
    {
        // PnP with RANSAC, initialized with the motion guess
        cv::Mat K = cameraModel.K();
        cv::Mat D = cameraModel.D();
        // (elided in the extracted listing) express the guess in the camera frame
        Transform guessCameraFrame = (guess * cameraModel.localTransform()).inverse();
        cv::Mat R = (cv::Mat_<double>(3,3) <<
                (double)guessCameraFrame.r11(), (double)guessCameraFrame.r12(), (double)guessCameraFrame.r13(),
                (double)guessCameraFrame.r21(), (double)guessCameraFrame.r22(), (double)guessCameraFrame.r23(),
                (double)guessCameraFrame.r31(), (double)guessCameraFrame.r32(), (double)guessCameraFrame.r33());

        cv::Mat rvec(1,3, CV_64FC1);
        cv::Rodrigues(R, rvec);
        cv::Mat tvec = (cv::Mat_<double>(1,3) <<
                (double)guessCameraFrame.x(), (double)guessCameraFrame.y(), (double)guessCameraFrame.z());

        // (elided in the extracted listing) solvePnPRansac(), defined further down in this file,
        // is called here with objectPoints, imagePoints, K, D, rvec, tvec, true, iterations,
        // reprojError, a minimum inlier count, inliers, flagsPnP and pnpRefineIterations;
        // it refines rvec/tvec and fills 'inliers'.
        if((int)inliers.size() >= minInliers)
        {
            cv::Rodrigues(rvec, R);
            Transform pnp(R.at<double>(0,0), R.at<double>(0,1), R.at<double>(0,2), tvec.at<double>(0),
                          R.at<double>(1,0), R.at<double>(1,1), R.at<double>(1,2), tvec.at<double>(1),
                          R.at<double>(2,0), R.at<double>(2,1), R.at<double>(2,2), tvec.at<double>(2));

            // (elided in the extracted listing) convert the estimated camera pose back to the base frame
            transform = (cameraModel.localTransform() * pnp).inverse();
            // Compute the covariance from the 3D distances/angles between the words of A and B
            if(covariance && words3B.size())
            {
                std::vector<float> errorSqrdDists(inliers.size());
                std::vector<float> errorSqrdAngles(inliers.size());
                oi = 0;
                for(unsigned int i=0; i<inliers.size(); ++i)
                {
                    std::map<int, cv::Point3f>::const_iterator iter = words3B.find(matches[inliers[i]]);
                    if(iter != words3B.end() && isFinite(iter->second))
                    {
                        const cv::Point3f & objPt = objectPoints[inliers[i]];
                        // (elided in the extracted listing) bring the word of B into A's frame
                        cv::Point3f newPt = transformPoint(iter->second, transform);
                        errorSqrdDists[oi] = uNormSquared(objPt.x-newPt.x, objPt.y-newPt.y, objPt.z-newPt.z);

                        Eigen::Vector4f v1(objPt.x - transform.x(), objPt.y - transform.y(), objPt.z - transform.z(), 0);
                        Eigen::Vector4f v2(newPt.x - transform.x(), newPt.y - transform.y(), newPt.z - transform.z(), 0);
                        errorSqrdAngles[oi++] = pcl::getAngle3D(v1, v2);
                    }
                }
                errorSqrdDists.resize(oi);
                errorSqrdAngles.resize(oi);
                if(errorSqrdDists.size())
                {
                    std::sort(errorSqrdDists.begin(), errorSqrdDists.end());
                    // 2.1981 is 1.4826^2, the squared MAD-to-standard-deviation factor; the lower
                    // quartile (size()>>2) is used instead of the median, presumably to ignore far features
                    double median_error_sqr = 2.1981 * (double)errorSqrdDists[errorSqrdDists.size() >> 2];
                    (*covariance)(cv::Range(0,3), cv::Range(0,3)) *= median_error_sqr;
                    std::sort(errorSqrdAngles.begin(), errorSqrdAngles.end());
                    median_error_sqr = 2.1981 * (double)errorSqrdAngles[errorSqrdAngles.size() >> 2];
                    (*covariance)(cv::Range(3,6), cv::Range(3,6)) *= median_error_sqr;
                }
                else
                {
                    UWARN("Not enough close points to compute covariance!");
                }
                if(float(oi) / float(inliers.size()) < 0.2f)
                {
                    UWARN("A very low number of inliers have valid depth (%d/%d), the transform returned may be wrong!", oi, (int)inliers.size());
                }
            }
            else if(covariance)
            {
                // No 3D words for B: scale the covariance by the RMS reprojection error of the inliers
                std::vector<cv::Point2f> imagePointsReproj;
                cv::projectPoints(objectPoints, rvec, tvec, K, cv::Mat(), imagePointsReproj);
                float err = 0.0f;
                for(unsigned int i=0; i<inliers.size(); ++i)
                {
                    err += uNormSquared(imagePoints.at(inliers[i]).x - imagePointsReproj.at(inliers[i]).x,
                                        imagePoints.at(inliers[i]).y - imagePointsReproj.at(inliers[i]).y);
                }
                *covariance *= std::sqrt(err/float(inliers.size()));
            }
        }
    }

    if(matchesOut)
    {
        *matchesOut = matches;
    }
    if(inliersOut)
    {
        inliersOut->resize(inliers.size());
        for(unsigned int i=0; i<inliers.size(); ++i)
        {
            inliersOut->at(i) = matches[inliers[i]];
        }
    }
    return transform;
}
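// Usage sketch (not part of the original file): a minimal example of calling
// estimateMotion3DTo2D() above. The helper name and the word maps are hypothetical;
// the numeric arguments mirror the declared defaults (minInliers=10, iterations=100,
// reprojError=5.0, flagsPnP=0, pnpRefineIterations=1).
static Transform exampleEstimateMotion3DTo2D(
        const std::map<int, cv::Point3f> & words3A,
        const std::map<int, cv::KeyPoint> & words2B,
        const CameraModel & cameraModel)
{
    cv::Mat covariance;
    std::vector<int> matches, inliers;
    Transform t = estimateMotion3DTo2D(
            words3A, words2B, cameraModel,
            10,                           // minInliers
            100,                          // iterations
            5.0,                          // reprojError [pixels]
            0,                            // flagsPnP (0 = cv::SOLVEPNP_ITERATIVE)
            1,                            // pnpRefineIterations
            Transform::getIdentity(),     // motion guess
            std::map<int, cv::Point3f>(), // words3B (optional, enables the 3D covariance)
            &covariance, &matches, &inliers);
    UDEBUG("t=%s inliers=%d/%d", t.prettyPrint().c_str(), (int)inliers.size(), (int)matches.size());
    return t; // null transform if not enough matches/inliers were found
}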
Transform estimateMotion3DTo3D(
        const std::map<int, cv::Point3f> & words3A,
        const std::map<int, cv::Point3f> & words3B,
        int minInliers,
        double inliersDistance,
        int iterations,
        int refineIterations,
        cv::Mat * covariance,
        std::vector<int> * matchesOut,
        std::vector<int> * inliersOut)
{
    Transform transform;
    std::vector<cv::Point3f> inliers1; // 3D positions of the common word ids in A
    std::vector<cv::Point3f> inliers2; // corresponding 3D positions in B
    std::vector<int> matches;
    // (elided in the extracted listing) the word ids present in both words3A and words3B are
    // collected here, filling inliers1/inliers2 with their 3D positions and 'matches' with the ids
    UASSERT(inliers1.size() == inliers2.size());
    UDEBUG("Unique correspondences = %d", (int)inliers1.size());

    if(covariance)
    {
        *covariance = cv::Mat::eye(6,6,CV_64FC1);
    }

    std::vector<int> inliers;
    if((int)inliers1.size() >= minInliers)
    {
        pcl::PointCloud<pcl::PointXYZ>::Ptr inliers1cloud(new pcl::PointCloud<pcl::PointXYZ>);
        pcl::PointCloud<pcl::PointXYZ>::Ptr inliers2cloud(new pcl::PointCloud<pcl::PointXYZ>);
        inliers1cloud->resize(inliers1.size());
        inliers2cloud->resize(inliers1.size());
        for(unsigned int i=0; i<inliers1.size(); ++i)
        {
            (*inliers1cloud)[i].x = inliers1[i].x;
            (*inliers1cloud)[i].y = inliers1[i].y;
            (*inliers1cloud)[i].z = inliers1[i].z;
            (*inliers2cloud)[i].x = inliers2[i].x;
            (*inliers2cloud)[i].y = inliers2[i].y;
            (*inliers2cloud)[i].z = inliers2[i].z;
        }
        // (elided in the extracted listing; reconstructed from the declaration, so the cloud order
        // and the sigma argument may differ from the original) RANSAC on the 3D-3D correspondences
        Transform t = transformFromXYZCorrespondences(
                inliers1cloud, inliers2cloud,
                inliersDistance, iterations, refineIterations,
                3.0, // refineModelSigma (declared default)
                &inliers, covariance);

        if(!t.isNull() && (int)inliers.size() >= minInliers)
        {
            transform = t;
        }
    }

    if(matchesOut)
    {
        *matchesOut = matches;
    }
    if(inliersOut)
    {
        inliersOut->resize(inliers.size());
        for(unsigned int i=0; i<inliers.size(); ++i)
        {
            inliersOut->at(i) = matches[inliers[i]];
        }
    }
    return transform;
}
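// Usage sketch (not part of the original file): calling estimateMotion3DTo3D() above with
// two sets of 3D words sharing the same ids. The helper name is hypothetical; the numeric
// arguments mirror the declared defaults (minInliers=10, inliersDistance=0.1, iterations=100,
// refineIterations=5).
static Transform exampleEstimateMotion3DTo3D(
        const std::map<int, cv::Point3f> & words3A,
        const std::map<int, cv::Point3f> & words3B)
{
    cv::Mat covariance;
    std::vector<int> matches, inliers;
    Transform t = estimateMotion3DTo3D(
            words3A, words3B,
            10,   // minInliers
            0.1,  // inliersDistance (same units as the 3D points)
            100,  // iterations
            5,    // refineIterations
            &covariance, &matches, &inliers);
    UDEBUG("t=%s inliers=%d/%d", t.prettyPrint().c_str(), (int)inliers.size(), (int)matches.size());
    return t; // null transform if not enough correspondences/inliers were found
}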
std::vector<float> computeReprojErrors(
        std::vector<cv::Point3f> opoints,
        std::vector<cv::Point2f> ipoints,
        const cv::Mat & cameraMatrix,
        const cv::Mat & distCoeffs,
        const cv::Mat & rvec,
        const cv::Mat & tvec,
        float reprojErrorThreshold,
        std::vector<int> & inliers)
{
    UASSERT(opoints.size() == ipoints.size());
    int count = (int)opoints.size();

    std::vector<cv::Point2f> projpoints;
    projectPoints(opoints, rvec, tvec, cameraMatrix, distCoeffs, projpoints);

    inliers.resize(count, 0);
    std::vector<float> err(count);
    int oi = 0;
    for (int i = 0; i < count; ++i)
    {
        float e = (float)cv::norm(ipoints[i] - projpoints[i]);
        if(e <= reprojErrorThreshold)
        {
            // (tail elided in the extracted listing) keep the index and error of each inlier
            inliers[oi] = i;
            err[oi++] = e;
        }
    }
    inliers.resize(oi);
    err.resize(oi);
    return err;
}
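// Usage sketch (not part of the original file): computeReprojErrors() above with made-up values.
// With an identity pose and the hypothetical pinhole matrix below, both points project exactly
// onto their observations, so both indices end up in 'inliers' with zero error.
static void exampleComputeReprojErrors()
{
    std::vector<cv::Point3f> opoints = {cv::Point3f(0.0f, 0.0f, 1.0f), cv::Point3f(0.1f, 0.0f, 1.0f)};
    std::vector<cv::Point2f> ipoints = {cv::Point2f(320.0f, 240.0f), cv::Point2f(372.0f, 240.0f)};
    cv::Mat K = (cv::Mat_<double>(3,3) << 520, 0, 320,  0, 520, 240,  0, 0, 1);
    cv::Mat rvec = cv::Mat::zeros(1, 3, CV_64FC1);
    cv::Mat tvec = cv::Mat::zeros(1, 3, CV_64FC1);
    std::vector<int> inliers;
    std::vector<float> errors = computeReprojErrors(opoints, ipoints, K, cv::Mat(), rvec, tvec, 2.0f, inliers);
    UDEBUG("inliers=%d/%d first error=%f", (int)inliers.size(), (int)opoints.size(), errors.empty() ? 0.0f : errors[0]);
}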
void solvePnPRansac(
        const std::vector<cv::Point3f> & objectPoints,
        const std::vector<cv::Point2f> & imagePoints,
        const cv::Mat & cameraMatrix,
        const cv::Mat & distCoeffs,
        cv::Mat & rvec,
        cv::Mat & tvec,
        bool useExtrinsicGuess,
        int iterationsCount,
        float reprojectionError,
        int minInliersCount,
        std::vector<int> & inliers,
        int flags,
        int refineIterations,
        float refineSigma)
{
    if(minInliersCount < 4)
    {
        minInliersCount = 4;
    }

    // (elided in the extracted listing) cv::solvePnPRansac() is called here with objectPoints,
    // imagePoints, cameraMatrix, distCoeffs, rvec, tvec, useExtrinsicGuess, iterationsCount,
    // reprojectionError and flags, producing the initial pose and the initial inlier set.

    float inlierThreshold = reprojectionError;
    if((int)inliers.size() >= minInliersCount && refineIterations > 0)
    {
        // Iteratively refine the pose on its inliers (RANSAC refineModel scheme)
        float error_threshold = inlierThreshold;
        int refine_iterations = 0;
        bool inlier_changed = false, oscillating = false;
        std::vector<int> new_inliers, prev_inliers = inliers;
        std::vector<size_t> inliers_sizes;
        cv::Mat new_model_rvec = rvec;
        cv::Mat new_model_tvec = tvec;
        do
        {
            // Re-estimate the pose using only the current inliers
            std::vector<cv::Point3f> opoints_inliers(prev_inliers.size());
            std::vector<cv::Point2f> ipoints_inliers(prev_inliers.size());
            for(unsigned int i=0; i<prev_inliers.size(); ++i)
            {
                opoints_inliers[i] = objectPoints[prev_inliers[i]];
                ipoints_inliers[i] = imagePoints[prev_inliers[i]];
            }

            UDEBUG("inliers=%d refine_iterations=%d, rvec=%f,%f,%f tvec=%f,%f,%f", (int)prev_inliers.size(), refine_iterations,
                    *new_model_rvec.ptr<double>(0), *new_model_rvec.ptr<double>(1), *new_model_rvec.ptr<double>(2),
                    *new_model_tvec.ptr<double>(0), *new_model_tvec.ptr<double>(1), *new_model_tvec.ptr<double>(2));

            cv::solvePnP(opoints_inliers, ipoints_inliers, cameraMatrix, distCoeffs, new_model_rvec, new_model_tvec, true, flags);
            inliers_sizes.push_back(prev_inliers.size());
            UDEBUG("rvec=%f,%f,%f tvec=%f,%f,%f",
                    *new_model_rvec.ptr<double>(0), *new_model_rvec.ptr<double>(1), *new_model_rvec.ptr<double>(2),
                    *new_model_tvec.ptr<double>(0), *new_model_tvec.ptr<double>(1), *new_model_tvec.ptr<double>(2));

            std::vector<float> err = computeReprojErrors(objectPoints, imagePoints, cameraMatrix, distCoeffs, new_model_rvec, new_model_tvec, error_threshold, new_inliers);
            UDEBUG("RANSAC refineModel: Number of inliers found (before/after): %d/%d, with an error threshold of %f.",
                    (int)prev_inliers.size(), (int)new_inliers.size(), error_threshold);
            if((int)new_inliers.size() < minInliersCount)
            {
                // Too few inliers with the refined model: retry or give up
                ++refine_iterations;
                if(refine_iterations >= refineIterations)
                {
                    break;
                }
                continue;
            }

            // Estimate the error variance of the inliers and tighten the error threshold
            float m = uMean(err.data(), err.size());
            float variance = uVariance(err.data(), err.size());
            error_threshold = std::min(inlierThreshold, refineSigma * float(sqrt(variance)));

            UDEBUG("RANSAC refineModel: New estimated error threshold: %f (variance=%f mean=%f) on iteration %d out of %d.",
                    error_threshold, variance, m, refine_iterations, refineIterations);
            inlier_changed = false;
            std::swap(prev_inliers, new_inliers);

            // If the number of inliers changed, we are still optimizing
            if(new_inliers.size() != prev_inliers.size())
            {
                // Check whether the inlier count is oscillating between two values
                if((int)inliers_sizes.size() >= minInliersCount)
                {
                    if(inliers_sizes[inliers_sizes.size() - 1] == inliers_sizes[inliers_sizes.size() - 3] &&
                       inliers_sizes[inliers_sizes.size() - 2] == inliers_sizes[inliers_sizes.size() - 4])
                    {
                        oscillating = true;
                        break;
                    }
                }
                inlier_changed = true;
                continue;
            }

            // Same count: check whether the inlier set itself changed
            for(size_t i = 0; i < prev_inliers.size(); ++i)
            {
                if(prev_inliers[i] != new_inliers[i])
                {
                    inlier_changed = true;
                    break;
                }
            }
        }
        while(inlier_changed && ++refine_iterations < refineIterations);
        // If the refined inlier set is too small, the refinement did not help
        if((int)prev_inliers.size() < minInliersCount)
        {
            UWARN("RANSAC refineModel: Refinement failed: got very low inliers (%d)!", (int)prev_inliers.size());
        }

        if(oscillating)
        {
            UDEBUG("RANSAC refineModel: Detected oscillations in the model refinement.");
        }

        std::swap(inliers, new_inliers);
        rvec = new_model_rvec;
        tvec = new_model_tvec;
    }
}
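// Usage sketch (not part of the original file): calling the solvePnPRansac() wrapper above.
// On input rvec/tvec hold the extrinsic guess (useExtrinsicGuess=true); on output they hold the
// refined camera pose. The helper name and the numeric values are hypothetical placeholders.
static void exampleSolvePnPRansac(
        const std::vector<cv::Point3f> & objectPoints,
        const std::vector<cv::Point2f> & imagePoints,
        const cv::Mat & K,
        const cv::Mat & D,
        cv::Mat & rvec,
        cv::Mat & tvec)
{
    std::vector<int> inliers;
    solvePnPRansac(
            objectPoints, imagePoints, K, D, rvec, tvec,
            true,   // useExtrinsicGuess
            100,    // iterationsCount
            5.0f,   // reprojectionError [pixels]
            10,     // minInliersCount
            inliers,
            0,      // flags (0 = cv::SOLVEPNP_ITERATIVE)
            1,      // refineIterations
            3.0f);  // refineSigma
    UDEBUG("PnP inliers=%d", (int)inliers.size());
}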