Camera.cpp
/*
 * This file is part of ALVAR, A Library for Virtual and Augmented Reality.
 *
 * Copyright 2007-2012 VTT Technical Research Centre of Finland
 *
 * Contact: VTT Augmented Reality Team <alvar.info@vtt.fi>
 *          <http://www.vtt.fi/multimedia/alvar.html>
 *
 * ALVAR is free software; you can redistribute it and/or modify it under the
 * terms of the GNU Lesser General Public License as published by the Free
 * Software Foundation; either version 2.1 of the License, or (at your option)
 * any later version.
 *
 * This library is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with ALVAR; if not, see
 * <http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html>.
 */

#include "Alvar.h"
#include "Camera.h"
#include "FileFormatUtils.h"
#include <memory>

using namespace std;

namespace alvar {
using namespace std;

void ProjPoints::Reset() {
        object_points.clear();
        image_points.clear();
        point_counts.clear();
}

// TODO: Does it matter what the etalon_square_size is???
bool ProjPoints::AddPointsUsingChessboard(IplImage *image, double etalon_square_size, int etalon_rows, int etalon_columns, bool visualize) {
        if (image->width == 0) return false;
        IplImage *gray = cvCreateImage(cvSize(image->width, image->height), IPL_DEPTH_8U, 1);
        CvPoint2D32f *corners = new CvPoint2D32f[etalon_rows*etalon_columns];
        if (image->nChannels == 1)
                cvCopy(image, gray);
        else
                cvCvtColor(image, gray, CV_RGB2GRAY);
        width = image->width;
        height = image->height;

        int point_count = 0;
        int pattern_was_found = cvFindChessboardCorners(gray, cvSize(etalon_rows, etalon_columns), corners, &point_count);
        if (!pattern_was_found) point_count = 0;
        if (point_count > 0) {
                cvFindCornerSubPix(gray, corners, point_count, cvSize(5,5), cvSize(-1,-1),
                        cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 10, 0.01f));
                for (int i=0; i<point_count; i++) {
                        CvPoint3D64f po;
                        CvPoint2D64f pi;
                        po.x = etalon_square_size*(i%etalon_rows);
                        po.y = etalon_square_size*(i/etalon_rows);
                        po.z = 0;
                        pi.x = corners[i].x;
                        pi.y = corners[i].y;
                        object_points.push_back(po);
                        image_points.push_back(pi);
                }
                point_counts.push_back(point_count);
        }
        if (visualize) {
                cvDrawChessboardCorners(image, cvSize(etalon_rows, etalon_columns),
                        corners, point_count, false /*pattern_was_found*/);
        }
        delete [] corners;
        cvReleaseImage(&gray);
        if (point_count > 0) return true;
        return false;
}
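
// A minimal usage sketch (not part of the library): collect several chessboard
// views into a ProjPoints structure and calibrate a Camera from them. The
// frame source, the number of frames and the pattern parameters below are
// assumptions for illustration only.
//
//      ProjPoints pp;
//      Camera cam;
//      for (int i = 0; i < n_frames; ++i) {
//              IplImage *frame = /* grab a frame from your capture source */;
//              pp.AddPointsUsingChessboard(frame, 2.8, 6, 8, true);
//      }
//      cam.Calibrate(pp);
//      cam.SaveCalib("calib.xml", FILE_FORMAT_XML);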

bool ProjPoints::AddPointsUsingMarkers(vector<PointDouble> &marker_corners, vector<PointDouble> &marker_corners_img, IplImage* image)
{
        width = image->width;
        height = image->height;
        if ((marker_corners.size() == marker_corners_img.size()) &&
                (marker_corners.size() == 4))
        {
                for (size_t p=0; p<marker_corners.size(); p++) {
                        CvPoint3D64f po;
                        CvPoint2D64f pi;
                        po.x = marker_corners[p].x;
                        po.y = marker_corners[p].y;
                        po.z = 0;
                        pi.x = marker_corners_img[p].x;
                        pi.y = marker_corners_img[p].y;
                        object_points.push_back(po);
                        image_points.push_back(pi);
                }
                point_counts.push_back(marker_corners.size());
        }

        return true;
}

Camera::Camera() {
        calib_K = cvMat(3, 3, CV_64F, calib_K_data);
        calib_D = cvMat(4, 1, CV_64F, calib_D_data);
        memset(calib_K_data,0,sizeof(double)*3*3);
        memset(calib_D_data,0,sizeof(double)*4);
        calib_K_data[0][0] = 550; // Just some focal length by default
        calib_K_data[1][1] = 550; // Just some focal length by default
        calib_K_data[0][2] = 320;
        calib_K_data[1][2] = 240;
        calib_K_data[2][2] = 1;
        calib_x_res = 640;
        calib_y_res = 480;
        x_res = 640;
        y_res = 480;
}

Camera::Camera(ros::NodeHandle & n, std::string cam_info_topic) : n_(n)
{
        calib_K = cvMat(3, 3, CV_64F, calib_K_data);
        calib_D = cvMat(4, 1, CV_64F, calib_D_data);
        memset(calib_K_data,0,sizeof(double)*3*3);
        memset(calib_D_data,0,sizeof(double)*4);
        calib_K_data[0][0] = 550; // Just some focal length by default
        calib_K_data[1][1] = 550; // Just some focal length by default
        calib_K_data[0][2] = 320;
        calib_K_data[1][2] = 240;
        calib_K_data[2][2] = 1;
        calib_x_res = 640;
        calib_y_res = 480;
        x_res = 640;
        y_res = 480;
        cameraInfoTopic_ = cam_info_topic;
        ROS_INFO("Subscribing to info topic");
        sub_ = n_.subscribe(cameraInfoTopic_, 1, &Camera::camInfoCallback, this);
        getCamInfo_ = false;
}

//
//Camera::Camera(int w, int h) {
//      calib_K = cvMat(3, 3, CV_64F, calib_K_data);
//      calib_D = cvMat(4, 1, CV_64F, calib_D_data);
//      memset(calib_K_data,0,sizeof(double)*3*3);
//      memset(calib_D_data,0,sizeof(double)*4);
//      calib_K_data[0][0] = w/2;
//      calib_K_data[1][1] = w/2;
//      calib_K_data[0][2] = w/2;
//      calib_K_data[1][2] = h/2;
//      calib_K_data[2][2] = 1;
//      calib_x_res = w;
//      calib_y_res = h;
//      x_res = w;
//      y_res = h;
//}

void Camera::SetSimpleCalib(int _x_res, int _y_res, double f_fac)
{
        memset(calib_K_data,0,sizeof(double)*3*3);
        memset(calib_D_data,0,sizeof(double)*4);
        calib_K_data[0][0] = _x_res*f_fac; // Just some focal length by default
        calib_K_data[1][1] = _x_res*f_fac; // Just some focal length by default
        calib_K_data[0][2] = _x_res/2;
        calib_K_data[1][2] = _y_res/2;
        calib_K_data[2][2] = 1;
        calib_x_res = _x_res;
        calib_y_res = _y_res;
}
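
// With the simple calibration above the intrinsic matrix becomes (sketch):
//
//          | f  0  cx |          f  = _x_res * f_fac
//      K = | 0  f  cy |   where  cx = _x_res / 2
//          | 0  0  1  |          cy = _y_res / 2
//
// i.e. a single focal length derived from the horizontal resolution, the
// principal point at the image centre, and all distortion coefficients zero.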

bool Camera::LoadCalibXML(const char *calibfile) {
        TiXmlDocument document;
        if (!document.LoadFile(calibfile)) return false;
        TiXmlElement *xml_root = document.RootElement();

        return
                xml_root->QueryIntAttribute("width", &calib_x_res) == TIXML_SUCCESS &&
                xml_root->QueryIntAttribute("height", &calib_y_res) == TIXML_SUCCESS &&
                FileFormatUtils::parseXMLMatrix(xml_root->FirstChildElement("intrinsic_matrix"), &calib_K) &&
                FileFormatUtils::parseXMLMatrix(xml_root->FirstChildElement("distortion"), &calib_D);
}

bool Camera::LoadCalibOpenCV(const char *calibfile) {
        cvSetErrMode(CV_ErrModeSilent);
        CvFileStorage* fs = cvOpenFileStorage(calibfile, 0, CV_STORAGE_READ);
        cvSetErrMode(CV_ErrModeLeaf);
        if (fs) {
                CvFileNode* root_node = cvGetRootFileNode(fs);
                // K Intrinsic
                CvFileNode* intrinsic_mat_node = cvGetFileNodeByName(fs, root_node, "intrinsic_matrix");
                CvMat* intrinsic_mat = reinterpret_cast<CvMat*>(cvRead(fs, intrinsic_mat_node));
                cvmSet(&calib_K, 0, 0, cvmGet(intrinsic_mat, 0, 0));
                cvmSet(&calib_K, 0, 1, cvmGet(intrinsic_mat, 0, 1));
                cvmSet(&calib_K, 0, 2, cvmGet(intrinsic_mat, 0, 2));
                cvmSet(&calib_K, 1, 0, cvmGet(intrinsic_mat, 1, 0));
                cvmSet(&calib_K, 1, 1, cvmGet(intrinsic_mat, 1, 1));
                cvmSet(&calib_K, 1, 2, cvmGet(intrinsic_mat, 1, 2));
                cvmSet(&calib_K, 2, 0, cvmGet(intrinsic_mat, 2, 0));
                cvmSet(&calib_K, 2, 1, cvmGet(intrinsic_mat, 2, 1));
                cvmSet(&calib_K, 2, 2, cvmGet(intrinsic_mat, 2, 2));

                // D Distortion
                CvFileNode* dist_mat_node = cvGetFileNodeByName(fs, root_node, "distortion");
                CvMat* dist_mat = reinterpret_cast<CvMat*>(cvRead(fs, dist_mat_node));
                cvmSet(&calib_D, 0, 0, cvmGet(dist_mat, 0, 0));
                cvmSet(&calib_D, 1, 0, cvmGet(dist_mat, 1, 0));
                cvmSet(&calib_D, 2, 0, cvmGet(dist_mat, 2, 0));
                cvmSet(&calib_D, 3, 0, cvmGet(dist_mat, 3, 0));

                // Resolution
                CvFileNode* width_node = cvGetFileNodeByName(fs, root_node, "width");
                CvFileNode* height_node = cvGetFileNodeByName(fs, root_node, "height");
                calib_x_res = width_node->data.i;
                calib_y_res = height_node->data.i;

                // Release the matrices allocated by cvRead before closing the storage.
                cvReleaseMat(&intrinsic_mat);
                cvReleaseMat(&dist_mat);
                cvReleaseFileStorage(&fs);
                return true;
        }
        // reset error status
        cvSetErrStatus(CV_StsOk);
        return false;
}
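
// For reference, LoadCalibOpenCV above expects an OpenCV file storage
// (XML/YAML) containing at least the following named nodes, which is also the
// layout that SaveCalibOpenCV below writes; the concrete file contents shown
// here are an assumption:
//
//      intrinsic_matrix : 3x3 CV_64F matrix (K)
//      distortion       : 4x1 CV_64F matrix (k1, k2, p1, p2)
//      width            : calibration image width in pixels
//      height           : calibration image height in pixels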

void Camera::camInfoCallback(const sensor_msgs::CameraInfoConstPtr & cam_info)
{
        if (!getCamInfo_)
        {
                cam_info_ = (*cam_info);

                calib_x_res = cam_info_.width;
                calib_y_res = cam_info_.height;
                x_res = calib_x_res;
                y_res = calib_y_res;

                cvmSet(&calib_K, 0, 0, cam_info_.K[0]);
                cvmSet(&calib_K, 0, 1, cam_info_.K[1]);
                cvmSet(&calib_K, 0, 2, cam_info_.K[2]);
                cvmSet(&calib_K, 1, 0, cam_info_.K[3]);
                cvmSet(&calib_K, 1, 1, cam_info_.K[4]);
                cvmSet(&calib_K, 1, 2, cam_info_.K[5]);
                cvmSet(&calib_K, 2, 0, cam_info_.K[6]);
                cvmSet(&calib_K, 2, 1, cam_info_.K[7]);
                cvmSet(&calib_K, 2, 2, cam_info_.K[8]);

                cvmSet(&calib_D, 0, 0, cam_info_.D[0]);
                cvmSet(&calib_D, 1, 0, cam_info_.D[1]);
                cvmSet(&calib_D, 2, 0, cam_info_.D[2]);
                cvmSet(&calib_D, 3, 0, cam_info_.D[3]);

                getCamInfo_ = true;
        }
}

bool Camera::SetCalib(const char *calibfile, int _x_res, int _y_res, FILE_FORMAT format) {
        x_res = _x_res;
        y_res = _y_res;
        if (!calibfile) return false;

        bool success = false;
        switch (format) {
                case FILE_FORMAT_XML:
                        success = LoadCalibXML(calibfile);
                        break;
                case FILE_FORMAT_OPENCV:
                case FILE_FORMAT_DEFAULT:
                        success = LoadCalibOpenCV(calibfile);
                        break;
                default:
                        // TODO: throw exception?
                        break;
        }

        if (success) {
                // Scale matrix in case of different resolution calibration.
                // The OpenCV documentation says:
                // - If an image from camera is up-sampled/down-sampled by some factor, all intrinsic camera parameters
                //   (fx, fy, cx and cy) should be scaled (multiplied/divided, respectively) by the same factor.
                // - The distortion coefficients remain the same regardless of the captured image resolution.
                if ((calib_x_res != x_res) || (calib_y_res != y_res)) {
                        calib_K_data[0][0] *= (double(x_res)/double(calib_x_res));
                        calib_K_data[0][2] *= (double(x_res)/double(calib_x_res));
                        calib_K_data[1][1] *= (double(y_res)/double(calib_y_res));
                        calib_K_data[1][2] *= (double(y_res)/double(calib_y_res));
                }
        }
        return success;
}
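
// Worked example of the scaling above (illustrative numbers only): a
// calibration made at 640x480 with fx = fy = 550, cx = 320, cy = 240 that is
// used at 320x240 gets fx and cx multiplied by 320/640 = 0.5 and fy and cy by
// 240/480 = 0.5, giving fx = fy = 275, cx = 160, cy = 120, while the
// distortion coefficients are left untouched.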

bool Camera::SaveCalibXML(const char *calibfile) {
        TiXmlDocument document;
        document.LinkEndChild(new TiXmlDeclaration("1.0", "UTF-8", "no"));
        document.LinkEndChild(new TiXmlElement("camera"));
        TiXmlElement *xml_root = document.RootElement();
        xml_root->SetAttribute("width", calib_x_res);
        xml_root->SetAttribute("height", calib_y_res);
        xml_root->LinkEndChild(FileFormatUtils::createXMLMatrix("intrinsic_matrix", &calib_K));
        xml_root->LinkEndChild(FileFormatUtils::createXMLMatrix("distortion", &calib_D));
        return document.SaveFile(calibfile);
}

bool Camera::SaveCalibOpenCV(const char *calibfile) {
        cvSetErrMode(CV_ErrModeSilent);
        CvFileStorage* fs = cvOpenFileStorage(calibfile, 0, CV_STORAGE_WRITE);
        cvSetErrMode(CV_ErrModeLeaf);
        if (fs) {
                cvWrite(fs, "intrinsic_matrix", &calib_K, cvAttrList(0,0));
                cvWrite(fs, "distortion", &calib_D, cvAttrList(0,0));
                //cvWriteReal(fs, "fov_x", data.fov_x);
                //cvWriteReal(fs, "fov_y", data.fov_y);
                cvWriteInt(fs, "width", calib_x_res);
                cvWriteInt(fs, "height", calib_y_res);
                cvReleaseFileStorage(&fs);
                return true;
        }
        // reset error status
        cvSetErrStatus(CV_StsOk);
        return false;
}

bool Camera::SaveCalib(const char *calibfile, FILE_FORMAT format) {
        if (!calibfile)
                return false;

        switch (format) {
                case FILE_FORMAT_XML:
                        return SaveCalibXML(calibfile);
                case FILE_FORMAT_OPENCV:
                case FILE_FORMAT_DEFAULT:
                        return SaveCalibOpenCV(calibfile);
                default:
                        return false;
        }
}

void Camera::Calibrate(ProjPoints &pp)
{
        CvMat *object_points = cvCreateMat((int)pp.object_points.size(), 1, CV_32FC3);
        CvMat *image_points = cvCreateMat((int)pp.image_points.size(), 1, CV_32FC2);
        const CvMat point_counts = cvMat((int)pp.point_counts.size(), 1, CV_32SC1, &pp.point_counts[0]);
        for (size_t i=0; i<pp.object_points.size(); i++) {
                object_points->data.fl[i*3+0] = (float)pp.object_points[i].x;
                object_points->data.fl[i*3+1] = (float)pp.object_points[i].y;
                object_points->data.fl[i*3+2] = (float)pp.object_points[i].z;
                image_points->data.fl[i*2+0]  = (float)pp.image_points[i].x;
                image_points->data.fl[i*2+1]  = (float)pp.image_points[i].y;
        }
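        // Note: CV_CALIB_USE_INTRINSIC_GUESS makes cvCalibrateCamera2 start from
        // the current contents of calib_K (the defaults set in the constructor)
        // instead of computing its own initial estimate.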
        cvCalibrateCamera2(object_points, image_points, &point_counts,
                cvSize(pp.width, pp.height),
                &calib_K, &calib_D, 0, 0, CV_CALIB_USE_INTRINSIC_GUESS);

        calib_x_res = pp.width;
        calib_y_res = pp.height;

        cvReleaseMat(&object_points);
        cvReleaseMat(&image_points);
}

void Camera::SetRes(int _x_res, int _y_res) {
        x_res = _x_res;
        y_res = _y_res;
        // Scale matrix in case of different resolution calibration.
        // The OpenCV documentation says:
        // - If an image from camera is up-sampled/down-sampled by some factor, all intrinsic camera parameters
        //   (fx, fy, cx and cy) should be scaled (multiplied/divided, respectively) by the same factor.
        // - The distortion coefficients remain the same regardless of the captured image resolution.
        if ((calib_x_res != x_res) || (calib_y_res != y_res)) {
                calib_K_data[0][0] *= (double(x_res)/double(calib_x_res));
                calib_K_data[0][2] *= (double(x_res)/double(calib_x_res));
                calib_K_data[1][1] *= (double(y_res)/double(calib_y_res));
                calib_K_data[1][2] *= (double(y_res)/double(calib_y_res));
        }
}

// TODO: Better approach for this...
// Note, the proj_matrix[8] is now negated. This is due to the fact
// that with OpenCV and OpenGL projection matrices both y and z
// should be mirrored. All other components are unchanged.
void Camera::GetOpenglProjectionMatrix(double proj_matrix[16], const int width, const int height, const float far_clip /*= 1000.0f*/, const float near_clip /*= 0.1f*/) {
        proj_matrix[0]  = 2.0f * calib_K_data[0][0] / float(width);
        proj_matrix[1]  = 0;
        proj_matrix[2]  = 0;
        proj_matrix[3]  = 0;
        proj_matrix[4]  = 2.0f * calib_K_data[0][1] / float(width); // skew
        proj_matrix[5]  = 2.0f * calib_K_data[1][1] / float(height);
        proj_matrix[6]  = 0;
        proj_matrix[7]  = 0;
        //proj_matrix[8] = (2.0f * calib_K_data[0][2] / float(width)) - 1.0f;
        proj_matrix[8]  = -(2.0f * calib_K_data[0][2] / float(width)) + 1.0f;
        proj_matrix[9]  = (2.0f * calib_K_data[1][2] / float(height)) - 1.0f;
        proj_matrix[10] = -(far_clip + near_clip)/(far_clip - near_clip);
        proj_matrix[11] = -1.0f;
        proj_matrix[12] = 0;
        proj_matrix[13] = 0;
        proj_matrix[14] = -2.0f * far_clip * near_clip / (far_clip - near_clip);
        proj_matrix[15] = 0;
}
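
// A minimal usage sketch (not part of the library): the matrix filled above is
// in OpenGL column-major order, so with the fixed-function pipeline it can be
// loaded directly. The GL setup around it is an assumption for illustration
// only.
//
//      double proj[16];
//      cam.GetOpenglProjectionMatrix(proj, cam.x_res, cam.y_res);
//      glMatrixMode(GL_PROJECTION);
//      glLoadMatrixd(proj);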

void Camera::SetOpenglProjectionMatrix(double proj_matrix[16], const int width, const int height) {
        x_res = width;
        y_res = height;
        calib_x_res = width;
        calib_y_res = height;
        calib_K_data[0][0] = proj_matrix[0] * float(width) / 2.0f;
        calib_K_data[0][1] = proj_matrix[4] * float(width) / 2.0f;
        calib_K_data[1][1] = proj_matrix[5] * float(height) / 2.0f;
        //calib_K_data[0][2] = (proj_matrix[8] + 1.0f) * float(width) / 2.0f;
        calib_K_data[0][2] = (-proj_matrix[8] + 1.0f) * float(width) / 2.0f; // Is this ok?
        calib_K_data[1][2] = (proj_matrix[9] + 1.0f) * float(height) / 2.0f;
        calib_K_data[2][2] = 1;
}

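// NOTE: the bodies of the Undistort/Distort methods below are commented out,
// so they are currently no-ops; the distortion model they would implement
// (iterative compensation with k1, k2, p1, p2) is kept for reference.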
void Camera::Undistort(PointDouble &point)
{
/*
        // focal length
        double ifx = 1./cvmGet(&calib_K, 0, 0);
        double ify = 1./cvmGet(&calib_K, 1, 1);

        // principal point
        double cx = cvmGet(&calib_K, 0, 2);
        double cy = cvmGet(&calib_K, 1, 2);

        // distortion coeffs
        double* k = calib_D.data.db;

        // compensate distortion iteratively
        double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y;
        for(int j = 0; j < 5; j++){
                double r2 = x*x + y*y;
                double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
                double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
                double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
                x = (x0 - delta_x)*icdist;
                y = (y0 - delta_y)*icdist;
        }
        // apply compensation
        point.x = x/ifx + cx;
        point.y = y/ify + cy;
*/
}

void Camera::Undistort(vector<PointDouble>& points)
{
/*
        // focal length
        double ifx = 1./cvmGet(&calib_K, 0, 0);
        double ify = 1./cvmGet(&calib_K, 1, 1);

        // principal point
        double cx = cvmGet(&calib_K, 0, 2);
        double cy = cvmGet(&calib_K, 1, 2);

        // distortion coeffs
        double* k = calib_D.data.db;

        for(unsigned int i = 0; i < points.size(); i++)
        {
                // compensate distortion iteratively
                double x = (points[i].x - cx)*ifx, y = (points[i].y - cy)*ify, x0 = x, y0 = y;
                for(int j = 0; j < 5; j++){
                        double r2 = x*x + y*y;
                        double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
                        double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
                        double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
                        x = (x0 - delta_x)*icdist;
                        y = (y0 - delta_y)*icdist;
                }
                // apply compensation
                points[i].x = x/ifx + cx;
                points[i].y = y/ify + cy;
        }
*/
}

void Camera::Undistort(CvPoint2D32f& point)
{
/*
        // focal length
        double ifx = 1./cvmGet(&calib_K, 0, 0);
        double ify = 1./cvmGet(&calib_K, 1, 1);

        // principal point
        double cx = cvmGet(&calib_K, 0, 2);
        double cy = cvmGet(&calib_K, 1, 2);

        // distortion coeffs
        double* k = calib_D.data.db;

        // compensate distortion iteratively
        double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y;
        for(int j = 0; j < 5; j++){
                double r2 = x*x + y*y;
                double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
                double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
                double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
                x = (x0 - delta_x)*icdist;
                y = (y0 - delta_y)*icdist;
        }
        // apply compensation
        point.x = float(x/ifx + cx);
        point.y = float(y/ify + cy);
*/
}

/*
        template<class PointType>
        void Undistort(PointType& point) {
                // focal length
                double ifx = 1./cvmGet(&calib_K, 0, 0);
                double ify = 1./cvmGet(&calib_K, 1, 1);

                // principal point
                double cx = cvmGet(&calib_K, 0, 2);
                double cy = cvmGet(&calib_K, 1, 2);

                // distortion coeffs
                double* k = calib_D.data.db;

                // compensate distortion iteratively
                double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y;
                for(int j = 0; j < 5; j++){
                        double r2 = x*x + y*y;
                        double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
                        double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
                        double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
                        x = (x0 - delta_x)*icdist;
                        y = (y0 - delta_y)*icdist;
                }
                // apply compensation
                point.x = x/ifx + cx;
                point.y = y/ify + cy;
        }
*/

void Camera::Distort(vector<PointDouble>& points)
{
/*
        double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy
        double fx = cvmGet(&calib_K, 0, 0), fy = cvmGet(&calib_K, 1, 1);
        double _fx = 1./fx, _fy = 1./fy;
        double* k = calib_D.data.db;

        double k1 = k[0], k2 = k[1];
        double p1 = k[2], p2 = k[3];

        for(unsigned int i = 0; i < points.size(); i++)
        {
                // Distort
                double y = (points[i].y - v0)*_fy;
                double y2 = y*y;
                double _2p1y = 2*p1*y;
                double _3p1y2 = 3*p1*y2;
                double p2y2 = p2*y2;

                double x = (points[i].x - u0)*_fx;
                double x2 = x*x;
                double r2 = x2 + y2;
                double d = 1 + (k1 + k2*r2)*r2;

                points[i].x = fx*(x*(d + _2p1y) + p2y2 + (3*p2)*x2) + u0;
                points[i].y = fy*(y*(d + (2*p2)*x) + _3p1y2 + p1*x2) + v0;
        }
*/
}

void Camera::Distort(PointDouble & point)
{
/*
        double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy
        double fx = cvmGet(&calib_K, 0, 0), fy = cvmGet(&calib_K, 1, 1);
        double _fx = 1./fx, _fy = 1./fy;
        double* k = calib_D.data.db;

        double k1 = k[0], k2 = k[1];
        double p1 = k[2], p2 = k[3];

        // Distort
        double y = (point.y - v0)*_fy;
        double y2 = y*y;
        double _2p1y = 2*p1*y;
        double _3p1y2 = 3*p1*y2;
        double p2y2 = p2*y2;

        double x = (point.x - u0)*_fx;
        double x2 = x*x;
        double r2 = x2 + y2;
        double d = 1 + (k1 + k2*r2)*r2;

        point.x = fx*(x*(d + _2p1y) + p2y2 + (3*p2)*x2) + u0;
        point.y = fy*(y*(d + (2*p2)*x) + _3p1y2 + p1*x2) + v0;
*/
}

void Camera::Distort(CvPoint2D32f & point)
{
/*
        double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy
        double fx = cvmGet(&calib_K, 0, 0), fy = cvmGet(&calib_K, 1, 1);
        double _fx = 1./fx, _fy = 1./fy;
        double* k = calib_D.data.db;

        double k1 = k[0], k2 = k[1];
        double p1 = k[2], p2 = k[3];

        // Distort
        double y = (point.y - v0)*_fy;
        double y2 = y*y;
        double _2p1y = 2*p1*y;
        double _3p1y2 = 3*p1*y2;
        double p2y2 = p2*y2;

        double x = (point.x - u0)*_fx;
        double x2 = x*x;
        double r2 = x2 + y2;
        double d = 1 + (k1 + k2*r2)*r2;

        point.x = float(fx*(x*(d + _2p1y) + p2y2 + (3*p2)*x2) + u0);
        point.y = float(fy*(y*(d + (2*p2)*x) + _3p1y2 + p1*x2) + v0);
*/
}

void Camera::CalcExteriorOrientation(vector<CvPoint3D64f>& pw, vector<CvPoint2D64f>& pi,
                                        Pose *pose)
{
        double ext_rodriques[3];
        double ext_translate[3];
        CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
        CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
        CvMat *object_points = cvCreateMat((int)pw.size(), 1, CV_32FC3);
        CvMat *image_points = cvCreateMat((int)pi.size(), 1, CV_32FC2);
        for (size_t i=0; i<pw.size(); i++) {
                object_points->data.fl[i*3+0] = (float)pw[i].x;
                object_points->data.fl[i*3+1] = (float)pw[i].y;
                object_points->data.fl[i*3+2] = (float)pw[i].z;
                image_points->data.fl[i*2+0]  = (float)pi[i].x;
                image_points->data.fl[i*2+1]  = (float)pi[i].y;
        }
        //cvFindExtrinsicCameraParams2(object_points, image_points, &calib_K, &calib_D, &ext_rodriques_mat, &ext_translate_mat);
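        // The distortion parameters are intentionally passed as NULL here, so this
        // overload assumes the image points are already undistorted; the other
        // CalcExteriorOrientation overloads below pass &calib_D instead.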
        cvFindExtrinsicCameraParams2(object_points, image_points, &calib_K, NULL, &ext_rodriques_mat, &ext_translate_mat);
        pose->SetRodriques(&ext_rodriques_mat);
        pose->SetTranslation(&ext_translate_mat);
        cvReleaseMat(&object_points);
        cvReleaseMat(&image_points);
}

void Camera::CalcExteriorOrientation(vector<CvPoint3D64f>& pw, vector<PointDouble >& pi,
                                        CvMat *rodriques, CvMat *tra)
{
        //assert(pw.size() == pi.size());

        int size = (int)pi.size();

        CvPoint3D64f *world_pts = new CvPoint3D64f[size];
        CvPoint2D64f *image_pts = new CvPoint2D64f[size];

        for(int i = 0; i < size; i++){
                world_pts[i].x = pw[i].x;
                world_pts[i].y = pw[i].y;
                world_pts[i].z = pw[i].z;
                // flip image points! Why???
                //image_pts[i].x = x_res - pi[i].x;
                //image_pts[i].y = y_res - pi[i].y;
                image_pts[i].x = pi[i].x;
                image_pts[i].y = pi[i].y;
        }

        double rot[3]; // rotation vector
        CvMat world_mat, image_mat, rot_vec;
        cvInitMatHeader(&world_mat, size, 1, CV_64FC3, world_pts);
        cvInitMatHeader(&image_mat, size, 1, CV_64FC2, image_pts);
        cvInitMatHeader(&rot_vec, 3, 1, CV_64FC1, rot);

        cvZero(tra);
        //cvmodFindExtrinsicCameraParams2(&world_mat, &image_mat, &calib_K, &calib_D, rodriques, tra, error);
        cvFindExtrinsicCameraParams2(&world_mat, &image_mat, &calib_K, &calib_D, rodriques, tra);

        delete[] world_pts;
        delete[] image_pts;
}

void Camera::CalcExteriorOrientation(vector<PointDouble >& pw, vector<PointDouble >& pi,
                                        CvMat *rodriques, CvMat *tra)
{
        //assert(pw.size() == pi.size());
        int size = (int)pi.size();

        vector<CvPoint3D64f> pw3;
        pw3.resize(size);
        for (int i=0; i<size; i++) {
                pw3[i].x = pw[i].x;
                pw3[i].y = pw[i].y;
                pw3[i].z = 0;
        }

        CalcExteriorOrientation(pw3, pi, rodriques, tra);
}

void Camera::CalcExteriorOrientation(vector<PointDouble>& pw, vector<PointDouble >& pi, Pose *pose)
{
        double ext_rodriques[3];
        double ext_translate[3];
        CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
        CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
        CalcExteriorOrientation(pw, pi, &ext_rodriques_mat, &ext_translate_mat);
        pose->SetRodriques(&ext_rodriques_mat);
        pose->SetTranslation(&ext_translate_mat);
}

bool Camera::CalcExteriorOrientation(const CvMat* object_points, CvMat* image_points, CvMat *rodriques, CvMat *tra) {
        cvFindExtrinsicCameraParams2(object_points, image_points, &calib_K, &calib_D, rodriques, tra);
        return true;
}

bool Camera::CalcExteriorOrientation(const CvMat* object_points, CvMat* image_points, Pose *pose) {
        double ext_rodriques[3];
        double ext_translate[3];
        CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
        CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
        bool ret = CalcExteriorOrientation(object_points, image_points, &ext_rodriques_mat, &ext_translate_mat);
        pose->SetRodriques(&ext_rodriques_mat);
        pose->SetTranslation(&ext_translate_mat);
        return ret;
}

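// Note: the caller is expected to resize the output vector pi to pw.size()
// before calling the overload below; pi.size() is used to allocate the
// temporary image point matrix and the results are written element by element.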
void Camera::ProjectPoints(vector<CvPoint3D64f>& pw, Pose *pose, vector<CvPoint2D64f>& pi) const {
        double ext_rodriques[3];
        double ext_translate[3];
        CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
        CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
        pose->GetRodriques(&ext_rodriques_mat);
        pose->GetTranslation(&ext_translate_mat);
        CvMat *object_points = cvCreateMat((int)pw.size(), 1, CV_32FC3);
        CvMat *image_points = cvCreateMat((int)pi.size(), 1, CV_32FC2);
        for (size_t i=0; i<pw.size(); i++) {
                object_points->data.fl[i*3+0] = (float)pw[i].x;
                object_points->data.fl[i*3+1] = (float)pw[i].y;
                object_points->data.fl[i*3+2] = (float)pw[i].z;
        }
        cvProjectPoints2(object_points, &ext_rodriques_mat, &ext_translate_mat, &calib_K, &calib_D, image_points);
        for (size_t i=0; i<pw.size(); i++) {
                pi[i].x = image_points->data.fl[i*2+0];
                pi[i].y = image_points->data.fl[i*2+1];
        }
        cvReleaseMat(&object_points);
        cvReleaseMat(&image_points);
}

void Camera::ProjectPoints(const CvMat* object_points, const CvMat* rotation_vector,
                           const CvMat* translation_vector, CvMat* image_points) const
{
        // Project points
        cvProjectPoints2(object_points, rotation_vector, translation_vector, &calib_K, &calib_D, image_points);
}

void Camera::ProjectPoints(const CvMat* object_points, const Pose* pose, CvMat* image_points) const
{
        double ext_rodriques[3];
        double ext_translate[3];
        CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
        CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
        pose->GetRodriques(&ext_rodriques_mat);
        pose->GetTranslation(&ext_translate_mat);
        cvProjectPoints2(object_points, &ext_rodriques_mat, &ext_translate_mat, &calib_K, &calib_D, image_points);
}

void Camera::ProjectPoints(const CvMat* object_points, double gl[16], CvMat* image_points) const
{
        double glm[4][4] = {
                gl[0], gl[4], gl[8],  gl[12],
                gl[1], gl[5], gl[9],  gl[13],
                gl[2], gl[6], gl[10], gl[14],
                gl[3], gl[7], gl[11], gl[15],
        };
        CvMat glm_mat = cvMat(4, 4, CV_64F, glm);

        // For some reason we need to mirror both y and z ???
        double cv_mul_data[4][4];
        CvMat cv_mul = cvMat(4, 4, CV_64F, cv_mul_data);
        cvSetIdentity(&cv_mul);
        cvmSet(&cv_mul, 1, 1, -1);
        cvmSet(&cv_mul, 2, 2, -1);
        cvMatMul(&cv_mul, &glm_mat, &glm_mat);

        // Rotation
        Rotation r;
        r.SetMatrix(&glm_mat);
        double rod[3];
        CvMat rod_mat = cvMat(3, 1, CV_64F, rod);
        r.GetRodriques(&rod_mat);
        // Translation
        double tra[3] = { glm[0][3], glm[1][3], glm[2][3] };
        CvMat tra_mat = cvMat(3, 1, CV_64F, tra);
        // Project points
        ProjectPoints(object_points, &rod_mat, &tra_mat, image_points);
}

void Camera::ProjectPoint(const CvPoint3D64f pw, const Pose *pose, CvPoint2D64f &pi) const {
        float object_points_data[3] = {(float)pw.x, (float)pw.y, (float)pw.z};
        float image_points_data[2] = {0};
        CvMat object_points = cvMat(1, 1, CV_32FC3, object_points_data);
        CvMat image_points = cvMat(1, 1, CV_32FC2, image_points_data);
        ProjectPoints(&object_points, pose, &image_points);
        pi.x = image_points.data.fl[0];
        pi.y = image_points.data.fl[1];
}

void Camera::ProjectPoint(const CvPoint3D32f pw, const Pose *pose, CvPoint2D32f &pi) const {
        float object_points_data[3] = {(float)pw.x, (float)pw.y, (float)pw.z};
        float image_points_data[2] = {0};
        CvMat object_points = cvMat(1, 1, CV_32FC3, object_points_data);
        CvMat image_points = cvMat(1, 1, CV_32FC2, image_points_data);
        ProjectPoints(&object_points, pose, &image_points);
        pi.x = image_points.data.fl[0];
        pi.y = image_points.data.fl[1];
}

Homography::Homography() {
        cvInitMatHeader(&H, 3, 3, CV_64F, H_data);
}

void Homography::Find(const vector<PointDouble>& pw, const vector<PointDouble>& pi)
{
        assert(pw.size() == pi.size());
        int size = (int)pi.size();

        CvPoint2D64f *srcp = new CvPoint2D64f[size];
        CvPoint2D64f *dstp = new CvPoint2D64f[size];

        for(int i = 0; i < size; ++i){
                srcp[i].x = pw[i].x;
                srcp[i].y = pw[i].y;

                dstp[i].x = pi[i].x;
                dstp[i].y = pi[i].y;
        }

        CvMat src_pts, dst_pts;
        cvInitMatHeader(&dst_pts, 1, size, CV_64FC2, dstp);
        cvInitMatHeader(&src_pts, 1, size, CV_64FC2, srcp);

#ifdef OPENCV11
        cvFindHomography(&src_pts, &dst_pts, &H, 0, 0, 0);
#else
        cvFindHomography(&src_pts, &dst_pts, &H);
#endif

        delete[] srcp;
        delete[] dstp;
}

void Homography::ProjectPoints(const vector<PointDouble>& from, vector<PointDouble>& to)
{
        int size = (int)from.size();

        CvPoint3D64f *srcp = new CvPoint3D64f[size];

        for(int i = 0; i < size; ++i){
                srcp[i].x = from[i].x;
                srcp[i].y = from[i].y;
                srcp[i].z = 1;
        }

        CvPoint3D64f *dstp = new CvPoint3D64f[size];

        CvMat src_pts, dst_pts;
        cvInitMatHeader(&src_pts, 1, size, CV_64FC3, srcp);
        cvInitMatHeader(&dst_pts, 1, size, CV_64FC3, dstp);

        cvTransform(&src_pts, &dst_pts, &H);

        to.clear();
        for(int i = 0; i < size; ++i)
        {
                PointDouble pt;
                pt.x = dstp[i].x / dstp[i].z;
                pt.y = dstp[i].y / dstp[i].z;

                to.push_back(pt);
        }

        delete[] srcp;
        delete[] dstp;
}
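
// A minimal usage sketch (not part of the library): estimate the homography
// from four or more model/image correspondences and reproject the model
// points with it. The point data itself is an assumption for illustration.
//
//      Homography h;
//      vector<PointDouble> model, image, reprojected;
//      // ... fill model and image with corresponding points ...
//      h.Find(model, image);
//      h.ProjectPoints(model, reprojected);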

} // namespace alvar

