#include <pcl/common/common.h>

Transform estimateMotion3DTo2D(
        const std::map<int, cv::Point3f> & words3A,
        const std::map<int, cv::KeyPoint> & words2B,
        const CameraModel & cameraModel,
        int minInliers,
        int iterations,
        double reprojError,
        int flagsPnP,
        int pnpRefineIterations,
        const Transform & guess,
        const std::map<int, cv::Point3f> & words3B,
        cv::Mat * covariance,
        std::vector<int> * matchesOut,
        std::vector<int> * inliersOut)
{
    UASSERT(cameraModel.isValidForProjection());
    Transform transform;
    std::vector<int> matches, inliers;

    if(covariance)
    {
        *covariance = cv::Mat::eye(6,6,CV_64FC1);
    }

    // Build 3D->2D correspondences from the word IDs common to words3A and words2B.
    std::vector<int> ids = uKeys(words2B);
    std::vector<cv::Point3f> objectPoints(ids.size());
    std::vector<cv::Point2f> imagePoints(ids.size());
    int oi = 0;
    matches.resize(ids.size());
    for(unsigned int i=0; i<ids.size(); ++i)
    {
        std::map<int, cv::Point3f>::const_iterator iter = words3A.find(ids[i]);
        if(iter != words3A.end() && util3d::isFinite(iter->second))
        {
            const cv::Point3f & pt = iter->second;
            objectPoints[oi].x = pt.x;
            objectPoints[oi].y = pt.y;
            objectPoints[oi].z = pt.z;
            imagePoints[oi] = words2B.find(ids[i])->second.pt;
            matches[oi++] = ids[i];
        }
    }
    objectPoints.resize(oi);
    imagePoints.resize(oi);
    matches.resize(oi);
    UDEBUG("words3A=%d words2B=%d matches=%d words3B=%d guess=%s",
            (int)words3A.size(), (int)words2B.size(), (int)matches.size(), (int)words3B.size(),
            guess.prettyPrint().c_str());

    if((int)matches.size() >= minInliers)
    {
        cv::Mat K = cameraModel.K();
        cv::Mat D = cameraModel.D();
        // ... (guessCameraFrame: the motion guess expressed in the camera frame)
        cv::Mat R = (cv::Mat_<double>(3,3) <<
                (double)guessCameraFrame.r11(), (double)guessCameraFrame.r12(), (double)guessCameraFrame.r13(),
                (double)guessCameraFrame.r21(), (double)guessCameraFrame.r22(), (double)guessCameraFrame.r23(),
                (double)guessCameraFrame.r31(), (double)guessCameraFrame.r32(), (double)guessCameraFrame.r33());

        cv::Mat rvec(1,3, CV_64FC1);
        cv::Rodrigues(R, rvec);
        cv::Mat tvec = (cv::Mat_<double>(1,3) <<
                (double)guessCameraFrame.x(), (double)guessCameraFrame.y(), (double)guessCameraFrame.z());
        // ... (RANSAC PnP: rvec, tvec and inliers are estimated with solvePnPRansac(), defined below)

        if((int)inliers.size() >= minInliers)
        {
            cv::Rodrigues(rvec, R);
            Transform pnp(R.at<double>(0,0), R.at<double>(0,1), R.at<double>(0,2), tvec.at<double>(0),
                          R.at<double>(1,0), R.at<double>(1,1), R.at<double>(1,2), tvec.at<double>(1),
                          R.at<double>(2,0), R.at<double>(2,1), R.at<double>(2,2), tvec.at<double>(2));

            transform = (cameraModel.localTransform() * pnp).inverse();
            // Estimate covariance from 3D distances/angles between the words3A points and
            // the corresponding words3B points transformed with the estimated motion.
            if(covariance && words3B.size())
            {
                std::vector<float> errorSqrdDists(inliers.size());
                std::vector<float> errorSqrdAngles(inliers.size());
                oi = 0;
                for(unsigned int i=0; i<inliers.size(); ++i)
                {
                    std::map<int, cv::Point3f>::const_iterator iter = words3B.find(matches[inliers[i]]);
                    if(iter != words3B.end() && util3d::isFinite(iter->second))
                    {
                        const cv::Point3f & objPt = objectPoints[inliers[i]];
                        cv::Point3f newPt = util3d::transformPoint(iter->second, transform);
                        errorSqrdDists[oi] = uNormSquared(objPt.x-newPt.x, objPt.y-newPt.y, objPt.z-newPt.z);

                        Eigen::Vector4f v1(objPt.x - transform.x(), objPt.y - transform.y(), objPt.z - transform.z(), 0);
                        Eigen::Vector4f v2(newPt.x - transform.x(), newPt.y - transform.y(), newPt.z - transform.z(), 0);
                        errorSqrdAngles[oi++] = pcl::getAngle3D(v1, v2);
                    }
                }

                errorSqrdDists.resize(oi);
                errorSqrdAngles.resize(oi);
                if(errorSqrdDists.size())
                {
                    std::sort(errorSqrdDists.begin(), errorSqrdDists.end());
                    // 2.1981 is presumably 1.4826^2, the usual consistency factor that relates a
                    // median-type robust scale to a Gaussian variance; size()>>2 picks the first
                    // quartile of the sorted squared errors.
                    double median_error_sqr = 2.1981 * (double)errorSqrdDists[errorSqrdDists.size() >> 2];
                    (*covariance)(cv::Range(0,3), cv::Range(0,3)) *= median_error_sqr;
                    std::sort(errorSqrdAngles.begin(), errorSqrdAngles.end());
                    median_error_sqr = 2.1981 * (double)errorSqrdAngles[errorSqrdAngles.size() >> 2];
                    (*covariance)(cv::Range(3,6), cv::Range(3,6)) *= median_error_sqr;
                }
                else
                {
                    UWARN("Not enough close points to compute covariance!");
                }
            }
            else if(covariance)
            {
                // Otherwise scale the covariance by the RMS reprojection error of the inliers.
                std::vector<cv::Point2f> imagePointsReproj;
                cv::projectPoints(objectPoints, rvec, tvec, K, cv::Mat(), imagePointsReproj);
                float err = 0.0f;
                for(unsigned int i=0; i<inliers.size(); ++i)
                {
                    err += uNormSquared(imagePoints.at(inliers[i]).x - imagePointsReproj.at(inliers[i]).x,
                                        imagePoints.at(inliers[i]).y - imagePointsReproj.at(inliers[i]).y);
                }
                *covariance *= std::sqrt(err/float(inliers.size()));
            }
        }
    }
    if(matchesOut)
    {
        *matchesOut = matches;
    }
    if(inliersOut)
    {
        inliersOut->resize(inliers.size());
        for(unsigned int i=0; i<inliers.size(); ++i)
        {
            inliersOut->at(i) = matches[inliers[i]];
        }
    }
    return transform;
}
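
// Usage sketch (illustrative only, not part of the original file): estimating the motion of
// frame B relative to frame A from A's 3D words and B's 2D words. The maps and camera model
// are assumed to come from the caller; the function name below is hypothetical, and the
// parameter values simply restate the defaults of the declaration of estimateMotion3DTo2D.
// It assumes placement in the same file/namespace as the function above.
static Transform exampleEstimateMotion3DTo2D(
        const std::map<int, cv::Point3f> & words3A,
        const std::map<int, cv::KeyPoint> & words2B,
        const CameraModel & cameraModel)
{
    cv::Mat covariance;
    std::vector<int> matches, inliers;
    Transform t = estimateMotion3DTo2D(
            words3A, words2B, cameraModel,
            10,                               // minInliers
            100,                              // iterations
            5.0,                              // reprojError (pixels)
            0,                                // flagsPnP (cv::SOLVEPNP_ITERATIVE)
            1,                                // pnpRefineIterations
            Transform::getIdentity(),         // guess
            std::map<int, cv::Point3f>(),     // words3B (optional, enables 3D covariance)
            &covariance, &matches, &inliers);
    UDEBUG("t=%s inliers=%d/%d", t.prettyPrint().c_str(), (int)inliers.size(), (int)matches.size());
    return t;
}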

Transform estimateMotion3DTo3D(
        const std::map<int, cv::Point3f> & words3A,
        const std::map<int, cv::Point3f> & words3B,
        int minInliers,
        double inliersDistance,
        int iterations,
        int refineIterations,
        cv::Mat * covariance,
        std::vector<int> * matchesOut,
        std::vector<int> * inliersOut)
{
    Transform transform;
    std::vector<cv::Point3f> inliers1; // matched points from words3A
    std::vector<cv::Point3f> inliers2; // matched points from words3B

    std::vector<int> matches;
    // ... (unique correspondences between words3A and words3B fill inliers1, inliers2 and matches)
    UASSERT(inliers1.size() == inliers2.size());
    UDEBUG("Unique correspondences = %d", (int)inliers1.size());

    if(covariance)
    {
        *covariance = cv::Mat::eye(6,6,CV_64FC1);
    }

    std::vector<int> inliers;
    if((int)inliers1.size() >= minInliers)
    {
        pcl::PointCloud<pcl::PointXYZ>::Ptr inliers1cloud(new pcl::PointCloud<pcl::PointXYZ>);
        pcl::PointCloud<pcl::PointXYZ>::Ptr inliers2cloud(new pcl::PointCloud<pcl::PointXYZ>);
        inliers1cloud->resize(inliers1.size());
        inliers2cloud->resize(inliers1.size());
        for(unsigned int i=0; i<inliers1.size(); ++i)
        {
            (*inliers1cloud)[i].x = inliers1[i].x;
            (*inliers1cloud)[i].y = inliers1[i].y;
            (*inliers1cloud)[i].z = inliers1[i].z;
            (*inliers2cloud)[i].x = inliers2[i].x;
            (*inliers2cloud)[i].y = inliers2[i].y;
            (*inliers2cloud)[i].z = inliers2[i].z;
        }
        // ... (Transform t is estimated from the two clouds with transformFromXYZCorrespondences(),
        //      which also fills inliers and, if requested, covariance)
        if(!t.isNull() && (int)inliers.size() >= minInliers)
        {
            transform = t;
        }
    }
    if(matchesOut)
    {
        *matchesOut = matches;
    }
    if(inliersOut)
    {
        inliersOut->resize(inliers.size());
        for(unsigned int i=0; i<inliers.size(); ++i)
        {
            inliersOut->at(i) = matches[inliers[i]];
        }
    }
    return transform;
}
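
// Usage sketch (illustrative only, not part of the original file): 3D-to-3D motion estimation
// between two word maps. The function name below is hypothetical; parameter values restate the
// defaults of the declaration of estimateMotion3DTo3D, and placement in the same file/namespace
// is assumed.
static Transform exampleEstimateMotion3DTo3D(
        const std::map<int, cv::Point3f> & words3A,
        const std::map<int, cv::Point3f> & words3B)
{
    cv::Mat covariance;
    std::vector<int> matches, inliers;
    Transform t = estimateMotion3DTo3D(
            words3A, words3B,
            10,      // minInliers
            0.1,     // inliersDistance (m)
            100,     // iterations
            5,       // refineIterations
            &covariance, &matches, &inliers);
    UDEBUG("t=%s inliers=%d/%d", t.prettyPrint().c_str(), (int)inliers.size(), (int)matches.size());
    return t;
}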

std::vector<float> computeReprojErrors(
        std::vector<cv::Point3f> opoints,
        std::vector<cv::Point2f> ipoints,
        const cv::Mat & cameraMatrix,
        const cv::Mat & distCoeffs,
        const cv::Mat & rvec,
        const cv::Mat & tvec,
        float reprojErrorThreshold,
        std::vector<int> & inliers)
{
    UASSERT(opoints.size() == ipoints.size());
    int count = (int)opoints.size();

    std::vector<cv::Point2f> projpoints;
    projectPoints(opoints, rvec, tvec, cameraMatrix, distCoeffs, projpoints);

    inliers.resize(count, 0);
    std::vector<float> err(count);
    int oi = 0;
    for (int i = 0; i < count; ++i)
    {
        float e = (float)cv::norm(ipoints[i] - projpoints[i]);
        if(e <= reprojErrorThreshold)
        {
            inliers[oi] = i;
            err[oi++] = e;
        }
    }
    inliers.resize(oi);
    err.resize(oi);
    return err;
}
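
// Usage sketch (illustrative only): computeReprojErrors() is the helper used by the refinement
// loop below. Given a pose (rvec/tvec), it returns the reprojection errors of the points that
// fall under the threshold and stores their indices in 'inliers'. Variable names here are
// placeholders for the caller's data:
//
//   std::vector<int> inlierIndices;
//   std::vector<float> errs = computeReprojErrors(
//       objectPoints, imagePoints, cameraMatrix, distCoeffs,
//       rvec, tvec, 5.0f /* px threshold */, inlierIndices);
//   // errs[k] is the reprojection error of objectPoints[inlierIndices[k]]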

void solvePnPRansac(
        const std::vector<cv::Point3f> & objectPoints,
        const std::vector<cv::Point2f> & imagePoints,
        const cv::Mat & cameraMatrix,
        const cv::Mat & distCoeffs,
        cv::Mat & rvec,
        cv::Mat & tvec,
        bool useExtrinsicGuess,
        int iterationsCount,
        float reprojectionError,
        int minInliersCount,
        std::vector<int> & inliers,
        int flags,
        int refineIterations,
        float refineSigma)
{
    if(minInliersCount < 4)
    {
        minInliersCount = 4;
    }

    // ... (initial estimate: cv::solvePnPRansac() fills rvec, tvec and inliers)

    // Iteratively refine the pose on the inlier set.
    float inlierThreshold = reprojectionError;
    if((int)inliers.size() >= minInliersCount && refineIterations > 0)
    {
        float error_threshold = inlierThreshold;
        int refine_iterations = 0;
        bool inlier_changed = false, oscillating = false;
        std::vector<int> new_inliers, prev_inliers = inliers;
        std::vector<size_t> inliers_sizes;
        cv::Mat new_model_rvec = rvec;
        cv::Mat new_model_tvec = tvec;

        do
        {
            // Re-fit the model on the current inlier set.
            std::vector<cv::Point3f> opoints_inliers(prev_inliers.size());
            std::vector<cv::Point2f> ipoints_inliers(prev_inliers.size());
            for(unsigned int i=0; i<prev_inliers.size(); ++i)
            {
                opoints_inliers[i] = objectPoints[prev_inliers[i]];
                ipoints_inliers[i] = imagePoints[prev_inliers[i]];
            }

            UDEBUG("inliers=%d refine_iterations=%d, rvec=%f,%f,%f tvec=%f,%f,%f", (int)prev_inliers.size(), refine_iterations,
                    *new_model_rvec.ptr<double>(0), *new_model_rvec.ptr<double>(1), *new_model_rvec.ptr<double>(2),
                    *new_model_tvec.ptr<double>(0), *new_model_tvec.ptr<double>(1), *new_model_tvec.ptr<double>(2));

            cv::solvePnP(opoints_inliers, ipoints_inliers, cameraMatrix, distCoeffs, new_model_rvec, new_model_tvec, true, flags);
            inliers_sizes.push_back(prev_inliers.size());

            UDEBUG("rvec=%f,%f,%f tvec=%f,%f,%f",
                    *new_model_rvec.ptr<double>(0), *new_model_rvec.ptr<double>(1), *new_model_rvec.ptr<double>(2),
                    *new_model_tvec.ptr<double>(0), *new_model_tvec.ptr<double>(1), *new_model_tvec.ptr<double>(2));

            // Re-select inliers with the refined pose and the current error threshold.
            std::vector<float> err = computeReprojErrors(objectPoints, imagePoints, cameraMatrix, distCoeffs, new_model_rvec, new_model_tvec, error_threshold, new_inliers);
            UDEBUG("RANSAC refineModel: Number of inliers found (before/after): %d/%d, with an error threshold of %f.",
                    (int)prev_inliers.size(), (int)new_inliers.size(), error_threshold);
            if((int)new_inliers.size() < minInliersCount)
            {
                ++refine_iterations;
                if(refine_iterations >= refineIterations)
                {
                    break;
                }
                continue;
            }

            // Shrink the error threshold toward refineSigma * standard deviation of the errors.
            float m = uMean(err.data(), err.size());
            float variance = uVariance(err.data(), err.size());
            error_threshold = std::min(inlierThreshold, refineSigma * float(sqrt(variance)));

            UDEBUG("RANSAC refineModel: New estimated error threshold: %f (variance=%f mean=%f) on iteration %d out of %d.",
                    error_threshold, variance, m, refine_iterations, refineIterations);
            inlier_changed = false;
            std::swap(prev_inliers, new_inliers);

            // If the inlier count changed, keep iterating unless the count is oscillating.
            if(new_inliers.size() != prev_inliers.size())
            {
                if((int)inliers_sizes.size() >= minInliersCount)
                {
                    if(inliers_sizes[inliers_sizes.size() - 1] == inliers_sizes[inliers_sizes.size() - 3] &&
                       inliers_sizes[inliers_sizes.size() - 2] == inliers_sizes[inliers_sizes.size() - 4])
                    {
                        oscillating = true;
                        break;
                    }
                }
                inlier_changed = true;
                continue;
            }

            // Same count: check whether the inlier set itself changed.
            for(size_t i = 0; i < prev_inliers.size(); ++i)
            {
                if(prev_inliers[i] != new_inliers[i])
                {
                    inlier_changed = true;
                    break;
                }
            }
        }
        while(inlier_changed && ++refine_iterations < refineIterations);

        if((int)prev_inliers.size() < minInliersCount)
        {
            UWARN("RANSAC refineModel: Refinement failed: got very low inliers (%d)!", (int)prev_inliers.size());
        }
        if(oscillating)
        {
            UDEBUG("RANSAC refineModel: Detected oscillations in the model refinement.");
        }

        std::swap(inliers, new_inliers);
        rvec = new_model_rvec;
        tvec = new_model_tvec;
    }
}
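
// Usage sketch (illustrative only, not part of the original file): the wrapper above first
// obtains an initial estimate (cv::solvePnPRansac(), per the referenced OpenCV signature), then
// repeatedly re-fits cv::solvePnP() on the inlier set, shrinking the inlier threshold to
// refineSigma * standard deviation of the reprojection errors (capped at the original threshold)
// until the inlier set stabilizes or starts oscillating. The function name below is hypothetical
// and assumes placement in the same namespace as the wrapper.
static void exampleSolvePnPRansac(
        const std::vector<cv::Point3f> & objectPoints,
        const std::vector<cv::Point2f> & imagePoints,
        const cv::Mat & K,
        const cv::Mat & D)
{
    cv::Mat rvec = cv::Mat::zeros(1,3,CV_64FC1);
    cv::Mat tvec = cv::Mat::zeros(1,3,CV_64FC1);
    std::vector<int> inliers;
    solvePnPRansac(
            objectPoints, imagePoints, K, D,
            rvec, tvec,
            false,   // useExtrinsicGuess
            100,     // iterationsCount
            8.0f,    // reprojectionError (px)
            10,      // minInliersCount
            inliers,
            0,       // flags (cv::SOLVEPNP_ITERATIVE)
            1,       // refineIterations
            3.0f);   // refineSigma
    UDEBUG("PnP inliers=%d/%d", (int)inliers.size(), (int)objectPoints.size());
}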