#include <visual_pose_estimation/planar.h>
#include <limits>

namespace cv{

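// Returns the centroid of a set of 3D points.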
Point3f massCenter(const vector<Point3f>& points)
{
  Point3f center(0.0f, 0.0f, 0.0f);
  for(size_t i = 0; i < points.size(); i++)
  {
    center += points[i];
  }

  center = center*(1.0f/points.size());

  return center;
}

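// Returns the cross product p1 x p2.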
Point3f crossProduct(Point3f& p1, Point3f& p2)
{
  Point3f p3(p1.y*p2.z - p1.z*p2.y, p1.z*p2.x - p1.x*p2.z, p1.x*p2.y - p1.y*p2.x);
  return p3;
}

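// Estimates the pose of a planar object whose plane normal (in the camera frame) is known.
// The image points are undistorted, back-projected onto the plane x·normal = 1, and the
// in-plane rotation angle alpha and scale C relating them to the object points are found
// in closed form. The object points expressed in the camera reference frame are returned
// in object_points_crf.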
void findPlanarObjectPose(const vector<Point3f>& _object_points, const Mat& image_points, const Point3f& normal,
    const Mat& intrinsic_matrix, const Mat& distortion_coeffs, double& alpha, double& C, vector<Point3f>& object_points_crf)
{
  vector<Point2f> _rays;
  undistortPoints(image_points, _rays, intrinsic_matrix, distortion_coeffs);

  // Back-project the undistorted image points onto the plane x·normal = 1,
  // discarding rays that are (nearly) parallel to the plane.
  vector<Point3f> rays;
  vector<Point3f> object_points;
  for(size_t i = 0; i < _rays.size(); i++)
  {
    Point3f ray(_rays[i].x, _rays[i].y, 1.0f);
    double proj = ray.dot(normal);
    if(fabs(proj) > std::numeric_limits<double>::epsilon())
    {
      rays.push_back(ray*(1.0/proj));
      object_points.push_back(_object_points[i]);
    }
  }

  // Center both point sets on their centroids.
  Point3f pc = massCenter(rays);
  Point3f p0c = massCenter(object_points);

  vector<Point3f> drays;
  drays.resize(rays.size());
  for(size_t i = 0; i < rays.size(); i++)
  {
    drays[i] = rays[i] - pc;
    object_points[i] -= p0c;
  }

  // Closed-form estimate of the in-plane rotation angle and the scale.
  double s1 = 0.0, s2 = 0.0, s3 = 0.0;
  for(size_t i = 0; i < rays.size(); i++)
  {
    Point3f vprod = crossProduct(drays[i], object_points[i]);
    s1 += vprod.dot(normal);
    s2 += drays[i].dot(object_points[i]);
    s3 += drays[i].dot(drays[i]);
  }

  alpha = atan2(s1, s2);
  C = (s2*cos(alpha) + s1*sin(alpha))/s3;

  // Object points expressed in the camera reference frame.
  object_points_crf.resize(rays.size());
  for(size_t i = 0; i < rays.size(); i++)
  {
    object_points_crf[i] = rays[i]*C;
  }
}

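// Computes the unit normal of the plane spanned by the first three object points.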
Point3f getPlanarObjectNormal(const Mat& objectPoints)
{
  Point3f off1 = objectPoints.at<Point3f>(1, 0) - objectPoints.at<Point3f>(0, 0);
  Point3f off2 = objectPoints.at<Point3f>(2, 0) - objectPoints.at<Point3f>(0, 0);
  Point3f normal0 = crossProduct(off1, off2);
  normal0 = normal0*(1.0/norm(normal0));
  return normal0;
}

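// Fits a rotation about the given axis (normal) between the centered point sets points1 and
// points2 and stores it in rvec as a Rodrigues vector (axis scaled by the estimated angle);
// rvec is allocated as a 3x1 CV_32F matrix if it is empty.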
void fit2DRotation(const vector<Point3f>& points1, const vector<Point3f>& points2, Point3f normal, Mat& rvec)
{
  Point3f center1 = massCenter(points1);
  Point3f center2 = massCenter(points2);

  float sum1 = 0.0f, sum2 = 0.0f;
  for(size_t i = 0; i < points1.size(); i++)
  {
    Point3f off1 = points1[i] - center1;
    Point3f off2 = points2[i] - center2;
    Point3f off1p = crossProduct(off1, normal);
    sum1 += off2.dot(off1);
    sum2 += off2.dot(off1p);
  }

  double alpha = -atan2(sum1, sum2);

  if(rvec.empty())
  {
    rvec.create(3, 1, CV_32F);
  }
  // Rodrigues vector: rotation axis scaled by the angle.
  rvec.at<Point3f>(0, 0) = normal*(alpha/norm(normal));
}

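// Solves the PnP problem for a planar object (objectPoints is expected as an Nx1 CV_32FC3 matrix).
// Without an extrinsic guess this simply falls back to solvePnP. With a guess, the plane normal
// implied by the guessed rotation is kept fixed and only the in-plane rotation and the translation
// are re-estimated via findPlanarObjectPose.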
void solvePlanarPnP(const Mat& objectPoints, const Mat& imagePoints, const Mat& cameraMatrix, const Mat& distCoeffs,
    Mat& _rvec, Mat& _tvec, bool useExtrinsicGuess)
{
  CV_Assert(objectPoints.depth() == CV_32F && imagePoints.depth() == CV_32F);

  if(!useExtrinsicGuess)
  {
    solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, _rvec, _tvec, false);
    return;
  }

  Mat rvec, tvec;
  _rvec.convertTo(rvec, CV_32FC1);
  _tvec.convertTo(tvec, CV_32FC1);

  Mat R(3, 3, CV_32FC1);
  Rodrigues(rvec, R);
  CV_Assert(R.type() == CV_32FC1);

  // Plane normal of the object, rotated into the camera frame by the extrinsic guess.
  Point3f normal0 = getPlanarObjectNormal(objectPoints);

  Mat Normal0(3, 1, CV_32F);
  Normal0.at<Point3f>(0, 0) = normal0;
  Mat Normal = R*Normal0;
  Point3f normal = Normal.at<Point3f>(0, 0);
  normal = normal*(1.0/norm(normal));
  if(normal.z < 0) normal = -normal;

  // Rotate the object points by the guessed rotation.
  vector<Point3f> rotated_object_points;
  rotated_object_points.resize(objectPoints.rows);
  for(size_t i = 0; i < rotated_object_points.size(); i++)
  {
    Mat p = objectPoints.rowRange((int)i, (int)i + 1);
    p = p.reshape(1, 3);
    Mat res = R*p;
    rotated_object_points[i] = res.at<Point3f>(0, 0);
  }

  // Refine the in-plane rotation and recover the object points in the camera frame.
  double alpha, C;
  vector<Point3f> object_points_crf;
  findPlanarObjectPose(rotated_object_points, imagePoints, normal, cameraMatrix, distCoeffs, alpha, C, object_points_crf);

  // Compose the in-plane correction (rotation by alpha about the normal) with the guessed rotation.
  Mat rp(3, 1, CV_32FC1);
  rp.at<Point3f>(0, 0) = normal*alpha;

  Mat Rp;
  Rodrigues(rp, Rp);

  R = Rp*R;
  Rodrigues(R, rvec);

  // Translation: centroid of the recovered camera-frame points minus the rotated centroid
  // of the rotated object points.
  Point3f center1 = massCenter(rotated_object_points);
  Mat mcenter1(3, 1, CV_32FC1, &center1);
  Mat mcenter1_alpha = Rp*mcenter1;
  Point3f center1_alpha = mcenter1_alpha.at<Point3f>(0, 0);

  Point3f center2 = massCenter(object_points_crf);
  tvec.at<Point3f>(0, 0) = center2 - center1_alpha;

  Mat mobj;
  objectPoints.copyTo(mobj);
  mobj = mobj.reshape(1);

  CV_Assert(R.type() == CV_32FC1 && mobj.type() == CV_32FC1);
  Mat mrobj = R*mobj.t();
  mrobj = mrobj.t();
  // Debug leftovers (currently unused).
  Point3f p1 = mrobj.at<Point3f>(0, 0) + center2 - center1;
  Point3f p2 = object_points_crf[0];

  rvec.convertTo(_rvec, _rvec.depth());
  tvec.convertTo(_tvec, _tvec.depth());
}

} // namespace cv