// NOTE(review): scraped fragment of rtabmap util3d_motion_estimation.cpp.
// Original line numbers are fused into the text and many interior lines are
// missing, so this pass adds comments only; no code tokens were altered.
//
// estimateMotion3DTo2D (single CameraModel overload): estimates the 6-DoF
// motion mapping 3D words of frame A onto 2D keypoints of frame B via
// PnP + RANSAC, and scales the 6x6 output covariance from robust inlier
// error statistics.  The fragment starts with the file's extra includes.
38 #include <pcl/common/common.h> 43 #include <opengv/absolute_pose/methods.hpp> 44 #include <opengv/absolute_pose/NoncentralAbsoluteAdapter.hpp> 45 #include <opengv/sac/Ransac.hpp> 46 #include <opengv/sac_problems/absolute_pose/AbsolutePoseSacProblem.hpp> 56 const std::map<int, cv::Point3f> & words3A,
57 const std::map<int, cv::KeyPoint> & words2B,
// words3B: optional 3D words of frame B; used below to measure the 3D
// residual of each inlier when available (covariance estimation).
66 const std::map<int, cv::Point3f> & words3B,
68 std::vector<int> * matchesOut,
69 std::vector<int> * inliersOut)
// Word ids matched in both frames, and the RANSAC inlier indices into them.
74 std::vector<int>
matches, inliers;
// Covariance defaults to identity; rescaled later from inlier errors.
78 *covariance = cv::Mat::eye(6,6,CV_64FC1);
// Build 3D->2D correspondences: for each keypoint id of B that also has a
// 3D word in A, record object point (A), image point (B), and the word id.
82 std::vector<int> ids =
uKeys(words2B);
83 std::vector<cv::Point3f> objectPoints(ids.size());
84 std::vector<cv::Point2f> imagePoints(ids.size());
86 matches.resize(ids.size());
87 for(
unsigned int i=0; i<ids.size(); ++i)
89 std::map<int, cv::Point3f>::const_iterator iter=words3A.find(ids[i]);
// (missing lines 90-91: presumably the found/finite guard — TODO confirm)
92 const cv::Point3f & pt = iter->second;
93 objectPoints[oi].x = pt.x;
94 objectPoints[oi].y = pt.y;
95 objectPoints[oi].z = pt.z;
96 imagePoints[oi] = words2B.find(ids[i])->second.pt;
97 matches[oi++] = ids[i];
// Shrink buffers to the number of accepted correspondences (oi).
101 objectPoints.resize(oi);
102 imagePoints.resize(oi);
105 UDEBUG(
"words3A=%d words2B=%d matches=%d words3B=%d guess=%s reprojError=%f iterations=%d",
106 (
int)words3A.size(), (int)words2B.size(), (int)matches.size(), (int)words3B.size(),
107 guess.
prettyPrint().c_str(), reprojError, iterations);
// Only attempt PnP when enough correspondences survived.
109 if((
int)matches.size() >= minInliers)
// Intrinsics (K) and distortion (D) from the camera model; the guess pose
// (in camera frame) is converted to rvec/tvec for solvePnP.
112 cv::Mat K = cameraModel.
K();
113 cv::Mat D = cameraModel.
D();
115 cv::Mat R = (cv::Mat_<double>(3,3) <<
116 (
double)guessCameraFrame.
r11(), (double)guessCameraFrame.
r12(), (double)guessCameraFrame.
r13(),
117 (double)guessCameraFrame.
r21(), (double)guessCameraFrame.
r22(), (double)guessCameraFrame.
r23(),
118 (double)guessCameraFrame.
r31(), (double)guessCameraFrame.
r32(), (double)guessCameraFrame.
r33());
// Rotation matrix -> Rodrigues vector expected by solvePnP.
120 cv::Mat rvec(3,1, CV_64FC1);
121 cv::Rodrigues(R, rvec);
122 cv::Mat tvec = (cv::Mat_<double>(3,1) <<
123 (
double)guessCameraFrame.
x(), (double)guessCameraFrame.
y(), (double)guessCameraFrame.
z());
// (missing lines 124-139: the actual solvePnPRansac call that fills
//  rvec/tvec/inliers is not visible in this fragment)
140 if((
int)inliers.size() >= minInliers)
// Rebuild the refined pose from rvec/tvec.
142 cv::Rodrigues(rvec, R);
143 Transform pnp(R.at<
double>(0,0), R.at<
double>(0,1), R.at<
double>(0,2), tvec.at<
double>(0),
144 R.at<
double>(1,0), R.at<
double>(1,1), R.at<
double>(1,2), tvec.at<
double>(1),
145 R.at<
double>(2,0), R.at<
double>(2,1), R.at<
double>(2,2), tvec.at<
double>(2));
// Covariance estimation path: needs 3D words of B, or a calibrated image
// size so a viewing ray can substitute for the missing 3D measurement.
150 if(covariance && (!words3B.empty() || cameraModel.
imageSize() != cv::Size()))
152 std::vector<float> errorSqrdDists(inliers.size());
153 std::vector<float> errorSqrdAngles(inliers.size());
156 for(
unsigned int i=0; i<inliers.size(); ++i)
158 cv::Point3f objPt = objectPoints[inliers[i]];
// Prefer the measured 3D word of B; otherwise (missing lines 165-181)
// fall back to a point on the keypoint's viewing ray (line 182).
164 std::map<int, cv::Point3f>::const_iterator iter = words3B.find(matches[inliers[i]]);
175 imagePoints.at(inliers[i]).x,
176 imagePoints.at(inliers[i]).y,
// Fallback point along the ray, 10% beyond the estimated depth objPt.z.
182 newPt = cv::Point3f(ray.x(), ray.y(), ray.z()) * objPt.z*1.1;
// Squared Euclidean residual between expected and observed 3D points.
185 errorSqrdDists[i] =
uNormSquared(objPt.x-newPt.x, objPt.y-newPt.y, objPt.z-newPt.z);
187 Eigen::Vector4f v1(objPt.x, objPt.y, objPt.z, 0);
188 Eigen::Vector4f v2(newPt.x, newPt.y, newPt.z, 0);
// NOTE(review): pcl::getAngle3D() returns an angle (radians), not a
// squared angle, despite the variable name — confirm intended units.
189 errorSqrdAngles[i] = pcl::getAngle3D(v1, v2);
// Robust scale: 2.1981 * the sorted squared error at index size()>>2.
// NOTE(review): size()>>2 is the first quartile, not the median the
// variable names suggest — confirm this is intentional.
192 std::sort(errorSqrdDists.begin(), errorSqrdDists.end());
194 double median_error_sqr_lin = 2.1981 * (double)errorSqrdDists[errorSqrdDists.size () >> 2];
196 (*covariance)(cv::Range(0,3), cv::Range(0,3)) *= median_error_sqr_lin;
197 std::sort(errorSqrdAngles.begin(), errorSqrdAngles.end());
198 double median_error_sqr_ang = 2.1981 * (double)errorSqrdAngles[errorSqrdAngles.size () >> 2];
200 (*covariance)(cv::Range(3,6), cv::Range(3,6)) *= median_error_sqr_ang;
// Reject the transform (reset covariance) when linear variance too high.
202 if(maxVariance > 0 && median_error_sqr_lin > maxVariance)
204 UWARN(
"Rejected PnP transform, variance is too high! %f > %f!", median_error_sqr_lin, maxVariance);
205 *covariance = cv::Mat::eye(6,6,CV_64FC1);
// Alternate covariance scale (branch header missing): RMS 2D reprojection
// error of the inliers, computed without distortion (cv::Mat() for D).
212 std::vector<cv::Point2f> imagePointsReproj;
213 cv::projectPoints(objectPoints, rvec, tvec, K, cv::Mat(), imagePointsReproj);
215 for(
unsigned int i=0; i<inliers.size(); ++i)
217 err +=
uNormSquared(imagePoints.at(inliers[i]).x - imagePointsReproj.at(inliers[i]).x, imagePoints.at(inliers[i]).y - imagePointsReproj.at(inliers[i]).y);
220 *covariance *=
std::sqrt(err/
float(inliers.size()));
// Outputs: all candidate matches, and inliers mapped back to word ids.
227 *matchesOut = matches;
231 inliersOut->resize(inliers.size());
232 for(
unsigned int i=0; i<inliers.size(); ++i)
234 inliersOut->at(i) = matches[inliers[i]];
// estimateMotion3DTo2D (multi-camera overload, scraped fragment): same
// 3D->2D motion estimation but for a horizontally-concatenated multi-camera
// rig, solved with OpenGV's non-central absolute pose (GP3P) RANSAC.
// Comments only; code tokens unchanged.
242 const std::map<int, cv::Point3f> & words3A,
243 const std::map<int, cv::KeyPoint> & words2B,
// One CameraModel per sub-image; all must share the same width (asserted).
244 const std::vector<CameraModel> & cameraModels,
249 int refineIterations,
252 const std::map<int, cv::Point3f> & words3B,
253 cv::Mat * covariance,
254 std::vector<int> * matchesOut,
255 std::vector<int> * inliersOut)
// Hard error when rtabmap was built without OpenGV.
258 #ifndef RTABMAP_OPENGV 259 UERROR(
"This function is only available if rtabmap is built with OpenGV dependency.");
261 UASSERT(!cameraModels.empty() && cameraModels[0].imageWidth() > 0);
// Keypoint x coordinates index into sub-images of this common width.
262 int subImageWidth = cameraModels[0].imageWidth();
263 for(
size_t i=0; i<cameraModels.size(); ++i)
265 UASSERT(cameraModels[i].isValidForProjection());
266 UASSERT(subImageWidth == cameraModels[i].imageWidth());
271 std::vector<int>
matches, inliers;
275 *covariance = cv::Mat::eye(6,6,CV_64FC1);
// Build correspondences, additionally recording which camera of the rig
// each keypoint belongs to (from its x position in the stitched image).
279 std::vector<int> ids =
uKeys(words2B);
280 std::vector<cv::Point3f> objectPoints(ids.size());
281 std::vector<cv::Point2f> imagePoints(ids.size());
283 matches.resize(ids.size());
284 std::vector<int> cameraIndexes(ids.size());
285 for(
unsigned int i=0; i<ids.size(); ++i)
287 std::map<int, cv::Point3f>::const_iterator iter=words3A.find(ids[i]);
290 const cv::Point2f & kpt = words2B.find(ids[i])->second.pt;
291 int cameraIndex = int(kpt.x / subImageWidth);
// NOTE(review): the assert message formats the int subImageWidth with %f —
// looks like a format-specifier mismatch worth confirming upstream.
292 UASSERT_MSG(cameraIndex >= 0 && cameraIndex < (
int)cameraModels.size(),
293 uFormat(
"cameraIndex=%d, models=%d, kpt.x=%f, subImageWidth=%f (Camera model image width=%d)",
294 cameraIndex, (
int)cameraModels.size(), kpt.x, subImageWidth, cameraModels[cameraIndex].imageWidth()).c_str());
296 const cv::Point3f & pt = iter->second;
297 objectPoints[oi] = pt;
298 imagePoints[oi] = kpt;
// Shift x back into the individual camera's local coordinates.
300 imagePoints[oi].x = imagePoints[oi].x - (cameraIndex*subImageWidth);
301 cameraIndexes[oi] = cameraIndex;
302 matches[oi++] = ids[i];
// Trim to accepted correspondences.
306 objectPoints.resize(oi);
307 imagePoints.resize(oi);
308 cameraIndexes.resize(oi);
311 UDEBUG(
"words3A=%d words2B=%d matches=%d words3B=%d guess=%s reprojError=%f iterations=%d",
312 (
int)words3A.size(), (int)words2B.size(), (int)matches.size(), (int)words3B.size(),
313 guess.
prettyPrint().c_str(), reprojError, iterations);
315 if((
int)matches.size() >= minInliers)
// Per-camera extrinsics for the non-central adapter: translation offsets
// and rotations of each camera's local transform w.r.t. the rig.
318 opengv::translations_t camOffsets;
319 opengv::rotations_t camRotations;
320 for(
size_t i=0; i<cameraModels.size(); ++i)
322 camOffsets.push_back(opengv::translation_t(
323 cameraModels[i].localTransform().
x(),
324 cameraModels[i].localTransform().y(),
325 cameraModels[i].localTransform().z()));
326 camRotations.push_back(cameraModels[i].localTransform().toEigen4d().block<3,3>(0, 0));
// Convert correspondences to OpenGV's form: 3D points, unit-depth bearing
// vectors (via CameraModel::project at z=1), and camera indices.
330 opengv::points_t points;
332 opengv::bearingVectors_t bearingVectors;
333 opengv::absolute_pose::NoncentralAbsoluteAdapter::camCorrespondences_t camCorrespondences;
334 for(
size_t i=0; i<objectPoints.size(); ++i)
336 int cameraIndex = cameraIndexes[i];
337 points.push_back(opengv::point_t(objectPoints[i].
x,objectPoints[i].y,objectPoints[i].z));
339 cameraModels[cameraIndex].project(imagePoints[i].x, imagePoints[i].y, 1, pt[0], pt[1], pt[2]);
341 bearingVectors.push_back(opengv::bearingVector_t(pt[0], pt[1], pt[2]));
342 camCorrespondences.push_back(cameraIndex);
// Seed the adapter with the caller's guess pose (R and t separately).
346 opengv::absolute_pose::NoncentralAbsoluteAdapter adapter(
353 adapter.setR(guess.
toEigen4d().block<3,3>(0, 0));
354 adapter.sett(opengv::translation_t(guess.
x(), guess.
y(), guess.
z()));
// RANSAC over the GP3P absolute-pose problem.  The threshold converts the
// pixel reprojection error into a bearing-vector angle residual:
// 1 - cos(atan(reprojError / fx)).
358 opengv::sac::Ransac<opengv::sac_problems::absolute_pose::AbsolutePoseSacProblem> ransac;
359 std::shared_ptr<opengv::sac_problems::absolute_pose::AbsolutePoseSacProblem> absposeproblem_ptr(
360 new opengv::sac_problems::absolute_pose::AbsolutePoseSacProblem(adapter, opengv::sac_problems::absolute_pose::AbsolutePoseSacProblem::GP3P));
362 ransac.sac_model_ = absposeproblem_ptr;
363 ransac.threshold_ = 1.0 -
cos(
atan(reprojError/cameraModels[0].fx()));
364 ransac.max_iterations_ = iterations;
365 UDEBUG(
"Ransac params: threshold = %f (reprojError=%f fx=%f), max iterations=%d", ransac.threshold_, reprojError, cameraModels[0].fx(), ransac.max_iterations_);
368 ransac.computeModel();
373 UDEBUG(
"Ransac iterations done: %d", ransac.iterations_);
374 inliers = ransac.inliers_;
375 UDEBUG(
"Ransac inliers: %ld", inliers.size());
377 if((
int)inliers.size() >= minInliers)
// Covariance estimation mirrors the single-camera overload: per-inlier
// 3D distance and angular residuals, robustly scaled.
384 std::vector<float> errorSqrdDists(inliers.size());
385 std::vector<float> errorSqrdAngles(inliers.size());
386 for(
unsigned int i=0; i<inliers.size(); ++i)
388 cv::Point3f objPt = objectPoints[inliers[i]];
// Residuals are computed in the observing camera's frame.
390 int cameraIndex = cameraIndexes[inliers[i]];
391 Transform transformCameraFrameInv = (transform * cameraModels[cameraIndex].localTransform()).
inverse();
// Prefer measured 3D word of B; otherwise (missing lines) use a viewing
// ray through the keypoint (projectDepthTo3DRay-style, per index below).
397 std::map<int, cv::Point3f>::const_iterator iter = words3B.find(matches[inliers[i]]);
407 cameraModels[cameraIndex].imageSize(),
408 imagePoints.at(inliers[i]).x,
409 imagePoints.at(inliers[i]).y,
410 cameraModels[cameraIndex].cx(),
411 cameraModels[cameraIndex].cy(),
412 cameraModels[cameraIndex].fx(),
413 cameraModels[cameraIndex].fy());
// Fallback point along the ray, 10% beyond the estimated depth.
415 newPt = cv::Point3f(ray.x(), ray.y(), ray.z()) * objPt.z*1.1;
418 errorSqrdDists[i] =
uNormSquared(objPt.x-newPt.x, objPt.y-newPt.y, objPt.z-newPt.z);
420 Eigen::Vector4f v1(objPt.x, objPt.y, objPt.z, 0);
421 Eigen::Vector4f v2(newPt.x, newPt.y, newPt.z, 0);
// NOTE(review): angle, not squared angle, despite the name (see single-
// camera overload) — confirm intended units.
422 errorSqrdAngles[i] = pcl::getAngle3D(v1, v2);
// Robust scale at the first quartile (size()>>2), factor 2.1981, applied
// to the linear (0:3) and angular (3:6) covariance blocks respectively.
425 std::sort(errorSqrdDists.begin(), errorSqrdDists.end());
427 double median_error_sqr_lin = 2.1981 * (double)errorSqrdDists[errorSqrdDists.size () >> 2];
429 (*covariance)(cv::Range(0,3), cv::Range(0,3)) *= median_error_sqr_lin;
430 std::sort(errorSqrdAngles.begin(), errorSqrdAngles.end());
431 double median_error_sqr_ang = 2.1981 * (double)errorSqrdAngles[errorSqrdAngles.size () >> 2];
433 (*covariance)(cv::Range(3,6), cv::Range(3,6)) *= median_error_sqr_ang;
// Reject when linear variance exceeds the caller's cap.
435 if(maxVariance > 0 && median_error_sqr_lin > maxVariance)
437 UWARN(
"Rejected PnP transform, variance is too high! %f > %f!", median_error_sqr_lin, maxVariance);
438 *covariance = cv::Mat::eye(6,6,CV_64FC1);
// Outputs, as in the single-camera overload.
447 *matchesOut = matches;
451 inliersOut->resize(inliers.size());
452 for(
unsigned int i=0; i<inliers.size(); ++i)
454 inliersOut->at(i) = matches[inliers[i]];
// estimateMotion3DTo3D (scraped fragment): estimates the rigid transform
// between two sets of 3D words by building PCL point clouds from the unique
// correspondences and (missing lines) calling a RANSAC-based XYZ
// correspondence solver (transformFromXYZCorrespondences per the index).
// Comments only; code tokens unchanged.
462 const std::map<int, cv::Point3f> & words3A,
463 const std::map<int, cv::Point3f> & words3B,
465 double inliersDistance,
467 int refineIterations,
468 cv::Mat * covariance,
469 std::vector<int> * matchesOut,
470 std::vector<int> * inliersOut)
// Paired 3D points from A and B sharing the same word id (filled by
// missing lines 475-483); sizes must agree.
473 std::vector<cv::Point3f> inliers1;
474 std::vector<cv::Point3f> inliers2;
484 UASSERT(inliers1.size() == inliers2.size());
485 UDEBUG(
"Unique correspondences = %d", (
int)inliers1.size());
// Covariance defaults to identity until estimated elsewhere.
489 *covariance = cv::Mat::eye(6,6,CV_64FC1);
492 std::vector<int> inliers;
493 if((
int)inliers1.size() >= minInliers)
// Copy both correspondence sets into PCL clouds for the solver.
495 pcl::PointCloud<pcl::PointXYZ>::Ptr inliers1cloud(
new pcl::PointCloud<pcl::PointXYZ>);
496 pcl::PointCloud<pcl::PointXYZ>::Ptr inliers2cloud(
new pcl::PointCloud<pcl::PointXYZ>);
497 inliers1cloud->resize(inliers1.size());
498 inliers2cloud->resize(inliers1.size());
499 for(
unsigned int i=0; i<inliers1.size(); ++i)
501 (*inliers1cloud)[i].x = inliers1[i].x;
502 (*inliers1cloud)[i].y = inliers1[i].y;
503 (*inliers1cloud)[i].z = inliers1[i].z;
504 (*inliers2cloud)[i].x = inliers2[i].x;
505 (*inliers2cloud)[i].y = inliers2[i].y;
506 (*inliers2cloud)[i].z = inliers2[i].z;
// (missing lines 507-517: the actual transform estimation producing t and
//  the inlier indices)
// Accept the result only if a valid transform and enough inliers remain.
518 if(!t.
isNull() && (int)inliers.size() >= minInliers)
526 *matchesOut = matches;
530 inliersOut->resize(inliers.size());
531 for(
unsigned int i=0; i<inliers.size(); ++i)
533 inliersOut->at(i) = matches[inliers[i]];
// computeReprojErrors (scraped fragment): projects the 3D object points
// with the given pose/intrinsics and measures the per-point pixel error
// against the observed image points.  Points whose error is within
// reprojErrorThreshold are recorded as inliers (recording lines missing);
// returns the per-inlier error vector per the declaration index.
// Comments only; code tokens unchanged.
542 std::vector<cv::Point3f> opoints,
543 std::vector<cv::Point2f> ipoints,
544 const cv::Mat & cameraMatrix,
545 const cv::Mat & distCoeffs,
546 const cv::Mat & rvec,
547 const cv::Mat & tvec,
548 float reprojErrorThreshold,
549 std::vector<int> & inliers)
551 UASSERT(opoints.size() == ipoints.size());
552 int count = (int)opoints.size();
// Project all object points once with the candidate pose.
554 std::vector<cv::Point2f> projpoints;
555 projectPoints(opoints, rvec, tvec, cameraMatrix, distCoeffs, projpoints);
// Pre-size outputs; inliers initialized to 0 then overwritten.
557 inliers.resize(count,0);
558 std::vector<float> err(count);
// Per-point Euclidean pixel error; inlier if within the threshold.
560 for (
int i = 0; i < count; ++i)
562 float e = (float)cv::norm( ipoints[i] - projpoints[i]);
563 if(e <= reprojErrorThreshold)
// solvePnPRansac (rtabmap wrapper, scraped fragment): runs OpenCV PnP
// RANSAC (call not visible here) then iteratively refines the model on the
// inlier set, shrinking the error threshold toward refineSigma * stddev of
// the residuals and detecting oscillation of the inlier set.
// Comments only; code tokens unchanged.
575 const std::vector<cv::Point3f> & objectPoints,
576 const std::vector<cv::Point2f> & imagePoints,
577 const cv::Mat & cameraMatrix,
578 const cv::Mat & distCoeffs,
581 bool useExtrinsicGuess,
583 float reprojectionError,
585 std::vector<int> & inliers,
587 int refineIterations,
// PnP needs at least 4 correspondences; (handling lines missing).
590 if(minInliersCount < 4)
// Refinement phase: only when the initial RANSAC produced enough inliers.
611 float inlierThreshold = reprojectionError;
612 if((
int)inliers.size() >= minInliersCount && refineIterations>0)
614 float error_threshold = inlierThreshold;
615 int refine_iterations = 0;
616 bool inlier_changed =
false, oscillating =
false;
// prev_inliers drives each refit; new_inliers holds the re-evaluation.
617 std::vector<int> new_inliers, prev_inliers = inliers;
// History of inlier-set sizes, used below to detect oscillation.
618 std::vector<size_t> inliers_sizes;
620 cv::Mat new_model_rvec = rvec;
621 cv::Mat new_model_tvec = tvec;
// (do-loop header missing) Refit pose on current inliers only.
626 std::vector<cv::Point3f> opoints_inliers(prev_inliers.size());
627 std::vector<cv::Point2f> ipoints_inliers(prev_inliers.size());
628 for(
unsigned int i=0; i<prev_inliers.size(); ++i)
630 opoints_inliers[i] = objectPoints[prev_inliers[i]];
631 ipoints_inliers[i] = imagePoints[prev_inliers[i]];
634 UDEBUG(
"inliers=%d refine_iterations=%d, rvec=%f,%f,%f tvec=%f,%f,%f", (
int)prev_inliers.size(), refine_iterations,
635 *new_model_rvec.ptr<
double>(0), *new_model_rvec.ptr<
double>(1), *new_model_rvec.ptr<
double>(2),
636 *new_model_tvec.ptr<
double>(0), *new_model_tvec.ptr<
double>(1), *new_model_tvec.ptr<
double>(2));
// Non-RANSAC solvePnP with extrinsic guess (true) on the inliers only.
639 cv::solvePnP(opoints_inliers, ipoints_inliers, cameraMatrix, distCoeffs, new_model_rvec, new_model_tvec,
true, flags);
640 inliers_sizes.push_back(prev_inliers.size());
642 UDEBUG(
"rvec=%f,%f,%f tvec=%f,%f,%f",
643 *new_model_rvec.ptr<
double>(0), *new_model_rvec.ptr<
double>(1), *new_model_rvec.ptr<
double>(2),
644 *new_model_tvec.ptr<
double>(0), *new_model_tvec.ptr<
double>(1), *new_model_tvec.ptr<
double>(2));
// Re-evaluate all points against the refined pose.
647 std::vector<float> err =
computeReprojErrors(objectPoints, imagePoints, cameraMatrix, distCoeffs, new_model_rvec, new_model_tvec, error_threshold, new_inliers);
648 UDEBUG(
"RANSAC refineModel: Number of inliers found (before/after): %d/%d, with an error threshold of %f.",
649 (
int)prev_inliers.size (), (int)new_inliers.size (), error_threshold);
// Bail-out checks: too few inliers, or iteration budget exhausted.
651 if ((
int)new_inliers.size() < minInliersCount)
654 if (refine_iterations >= refineIterations)
// Adaptive threshold: shrink toward refineSigma * stddev of residuals,
// never exceeding the original inlierThreshold.
662 float m =
uMean(err.data(), err.size());
663 float variance =
uVariance(err.data(), err.size());
664 error_threshold =
std::min(inlierThreshold, refineSigma *
float(
sqrt(variance)));
666 UDEBUG (
"RANSAC refineModel: New estimated error threshold: %f (variance=%f mean=%f) on iteration %d out of %d.",
667 error_threshold, variance, m, refine_iterations, refineIterations);
668 inlier_changed =
false;
// Oscillation detection: inlier-set size alternates between two values
// (last == 3rd-last and 2nd-last == 4th-last).
672 if (new_inliers.size () != prev_inliers.size ())
// NOTE(review): comparing history length against minInliersCount looks
// odd — PCL's reference refineModel uses >= 4 here (enough history for
// the two pairwise comparisons below).  Confirm upstream intent.
675 if ((
int)inliers_sizes.size () >= minInliersCount)
677 if (inliers_sizes[inliers_sizes.size () - 1] == inliers_sizes[inliers_sizes.size () - 3] &&
678 inliers_sizes[inliers_sizes.size () - 2] == inliers_sizes[inliers_sizes.size () - 4])
684 inlier_changed =
true;
// Same size: still changed if any member of the set differs.
689 for (
size_t i = 0; i < prev_inliers.size (); ++i)
692 if (prev_inliers[i] != new_inliers[i])
694 inlier_changed =
true;
// Loop while the inlier set keeps changing and budget remains.
699 while (inlier_changed && ++refine_iterations < refineIterations);
// Post-loop diagnostics: refinement failure / oscillation warnings.
702 if ((
int)prev_inliers.size() < minInliersCount)
704 UWARN (
"RANSAC refineModel: Refinement failed: got very low inliers (%d)!", (
int)prev_inliers.size());
709 UDEBUG(
"RANSAC refineModel: Detected oscillations in the model refinement.");
// Commit the refined model back to the caller's rvec/tvec.
713 rvec = new_model_rvec;
714 tvec = new_model_tvec;
T uVariance(const T *v, unsigned int size, T meanV)
bool RTABMAP_EXP isFinite(const cv::Point3f &pt)
const cv::Size & imageSize() const
T uMean(const T *v, unsigned int size)
GLM_FUNC_DECL genType min(genType const &x, genType const &y)
cv::Point3f RTABMAP_EXP transformPoint(const cv::Point3f &pt, const Transform &transform)
std::vector< K > uKeys(const std::multimap< K, V > &mm)
GLM_FUNC_DECL vecType< T, P > sqrt(vecType< T, P > const &x)
GLM_FUNC_DECL genType e()
void swap(linb::any &lhs, linb::any &rhs) noexcept
Basic mathematics functions.
Transform RTABMAP_EXP transformFromXYZCorrespondences(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &cloud1, const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &cloud2, double inlierThreshold=0.02, int iterations=100, int refineModelIterations=10, double refineModelSigma=3.0, std::vector< int > *inliers=0, cv::Mat *variance=0)
bool solvePnPRansac(InputArray _opoints, InputArray _ipoints, InputArray _cameraMatrix, InputArray _distCoeffs, OutputArray _rvec, OutputArray _tvec, bool useExtrinsicGuess, int iterationsCount, float reprojectionError, double confidence, OutputArray _inliers, int flags)
bool uIsFinite(const T &value)
#define UASSERT(condition)
GLM_FUNC_DECL genType cos(genType const &angle)
Wrappers of STL for convenient functions.
GLM_FUNC_DECL genType normalize(genType const &x)
#define UASSERT_MSG(condition, msg_str)
Transform RTABMAP_EXP estimateMotion3DTo2D(const std::map< int, cv::Point3f > &words3A, const std::map< int, cv::KeyPoint > &words2B, const CameraModel &cameraModel, int minInliers=10, int iterations=100, double reprojError=5., int flagsPnP=0, int pnpRefineIterations=1, float maxVariance=0, const Transform &guess=Transform::getIdentity(), const std::map< int, cv::Point3f > &words3B=std::map< int, cv::Point3f >(), cv::Mat *covariance=0, std::vector< int > *matchesOut=0, std::vector< int > *inliersOut=0)
GLM_FUNC_DECL genType atan(genType const &y, genType const &x)
const Transform & localTransform() const
Transform RTABMAP_EXP estimateMotion3DTo3D(const std::map< int, cv::Point3f > &words3A, const std::map< int, cv::Point3f > &words3B, int minInliers=10, double inliersDistance=0.1, int iterations=100, int refineIterations=5, cv::Mat *covariance=0, std::vector< int > *matchesOut=0, std::vector< int > *inliersOut=0)
void RTABMAP_EXP solvePnPRansac(const std::vector< cv::Point3f > &objectPoints, const std::vector< cv::Point2f > &imagePoints, const cv::Mat &cameraMatrix, const cv::Mat &distCoeffs, cv::Mat &rvec, cv::Mat &tvec, bool useExtrinsicGuess, int iterationsCount, float reprojectionError, int minInliersCount, std::vector< int > &inliers, int flags, int refineIterations=1, float refineSigma=3.0f)
Eigen::Vector3f RTABMAP_EXP projectDepthTo3DRay(const cv::Size &imageSize, float x, float y, float cx, float cy, float fx, float fy)
T uNormSquared(const std::vector< T > &v)
std::vector< float > computeReprojErrors(std::vector< cv::Point3f > opoints, std::vector< cv::Point2f > ipoints, const cv::Mat &cameraMatrix, const cv::Mat &distCoeffs, const cv::Mat &rvec, const cv::Mat &tvec, float reprojErrorThreshold, std::vector< int > &inliers)
ULogger class and convenient macros.
std::string UTILITE_EXP uFormat(const char *fmt,...)
bool isValidForProjection() const
void RTABMAP_EXP findCorrespondences(const std::multimap< int, cv::KeyPoint > &wordsA, const std::multimap< int, cv::KeyPoint > &wordsB, std::list< std::pair< cv::Point2f, cv::Point2f > > &pairs)
GLM_FUNC_DECL matType< T, P > inverse(matType< T, P > const &m)