Camera.cpp
/*
 * This file is part of ALVAR, A Library for Virtual and Augmented Reality.
 *
 * Copyright 2007-2012 VTT Technical Research Centre of Finland
 *
 * Contact: VTT Augmented Reality Team <alvar.info@vtt.fi>
 *          <http://www.vtt.fi/multimedia/alvar.html>
 *
 * ALVAR is free software; you can redistribute it and/or modify it under the
 * terms of the GNU Lesser General Public License as published by the Free
 * Software Foundation; either version 2.1 of the License, or (at your option)
 * any later version.
 *
 * This library is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with ALVAR; if not, see
 * <http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html>.
 */

#include "ar_track_alvar/Alvar.h"
#include "ar_track_alvar/Camera.h"
#include "ar_track_alvar/FileFormatUtils.h"
#include <memory>

using namespace std;

namespace alvar {
using namespace std;

void ProjPoints::Reset() {
    object_points.clear();
    image_points.clear();
    point_counts.clear();
}

// TODO: Does it matter what the etalon_square_size is???
bool ProjPoints::AddPointsUsingChessboard(IplImage *image, double etalon_square_size, int etalon_rows, int etalon_columns, bool visualize) {
    if (image->width == 0) return false;
    IplImage *gray = cvCreateImage(cvSize(image->width, image->height), IPL_DEPTH_8U, 1);
    CvPoint2D32f *corners = new CvPoint2D32f[etalon_rows*etalon_columns];
    if (image->nChannels == 1)
        cvCopy(image, gray);
    else
        cvCvtColor(image, gray, CV_RGB2GRAY);
    width = image->width;
    height = image->height;

    int point_count = 0;
    int pattern_was_found = cvFindChessboardCorners(gray, cvSize(etalon_rows, etalon_columns), corners, &point_count);
    if (!pattern_was_found) point_count = 0;
    if (point_count > 0) {
        cvFindCornerSubPix(gray, corners, point_count, cvSize(5,5), cvSize(-1,-1),
            cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 10, 0.01f));
        for (int i=0; i<point_count; i++) {
            CvPoint3D64f po;
            CvPoint2D64f pi;
            po.x = etalon_square_size*(i%etalon_rows);
            po.y = etalon_square_size*(i/etalon_rows);
            po.z = 0;
            pi.x = corners[i].x;
            pi.y = corners[i].y;
            object_points.push_back(po);
            image_points.push_back(pi);
        }
        point_counts.push_back(point_count);
    }
    if (visualize) {
        cvDrawChessboardCorners(image, cvSize(etalon_rows, etalon_columns),
            corners, point_count, false /*pattern_was_found*/);
    }
    delete [] corners;
    cvReleaseImage(&gray);
    if (point_count > 0) return true;
    return false;
}

bool ProjPoints::AddPointsUsingMarkers(vector<PointDouble> &marker_corners, vector<PointDouble> &marker_corners_img, IplImage* image)
{
    width = image->width;
    height = image->height;
    if ((marker_corners.size() == marker_corners_img.size()) &&
        (marker_corners.size() == 4))
    {
        for (size_t p=0; p<marker_corners.size(); p++) {
            CvPoint3D64f po;
            CvPoint2D64f pi;
            po.x = marker_corners[p].x;
            po.y = marker_corners[p].y;
            po.z = 0;
            pi.x = marker_corners_img[p].x;
            pi.y = marker_corners_img[p].y;
            object_points.push_back(po);
            image_points.push_back(pi);
        }
        point_counts.push_back(marker_corners.size());
    }

    return true;
}

Camera::Camera() {
    calib_K = cvMat(3, 3, CV_64F, calib_K_data);
    calib_D = cvMat(4, 1, CV_64F, calib_D_data);
    memset(calib_K_data,0,sizeof(double)*3*3);
    memset(calib_D_data,0,sizeof(double)*4);
    calib_K_data[0][0] = 550; // Just some focal length by default
    calib_K_data[1][1] = 550; // Just some focal length by default
    calib_K_data[0][2] = 320;
    calib_K_data[1][2] = 240;
    calib_K_data[2][2] = 1;
    calib_x_res = 640;
    calib_y_res = 480;
    x_res = 640;
    y_res = 480;
}

Camera::Camera(ros::NodeHandle & n, std::string cam_info_topic):n_(n)
{
    calib_K = cvMat(3, 3, CV_64F, calib_K_data);
    calib_D = cvMat(4, 1, CV_64F, calib_D_data);
    memset(calib_K_data,0,sizeof(double)*3*3);
    memset(calib_D_data,0,sizeof(double)*4);
    calib_K_data[0][0] = 550; // Just some focal length by default
    calib_K_data[1][1] = 550; // Just some focal length by default
    calib_K_data[0][2] = 320;
    calib_K_data[1][2] = 240;
    calib_K_data[2][2] = 1;
    calib_x_res = 640;
    calib_y_res = 480;
    x_res = 640;
    y_res = 480;
    cameraInfoTopic_ = cam_info_topic;
    ROS_INFO ("Subscribing to info topic");
    sub_ = n_.subscribe (cameraInfoTopic_, 1, &Camera::camInfoCallback, this);
    getCamInfo_ = false;
}

//
//Camera::Camera(int w, int h) {
//    calib_K = cvMat(3, 3, CV_64F, calib_K_data);
//    calib_D = cvMat(4, 1, CV_64F, calib_D_data);
//    memset(calib_K_data,0,sizeof(double)*3*3);
//    memset(calib_D_data,0,sizeof(double)*4);
//    calib_K_data[0][0] = w/2;
//    calib_K_data[1][1] = w/2;
//    calib_K_data[0][2] = w/2;
//    calib_K_data[1][2] = h/2;
//    calib_K_data[2][2] = 1;
//    calib_x_res = w;
//    calib_y_res = h;
//    x_res = w;
//    y_res = h;
//}

void Camera::SetSimpleCalib(int _x_res, int _y_res, double f_fac)
{
    memset(calib_K_data,0,sizeof(double)*3*3);
    memset(calib_D_data,0,sizeof(double)*4);
    calib_K_data[0][0] = _x_res*f_fac; // Just some focal length by default
    calib_K_data[1][1] = _x_res*f_fac; // Just some focal length by default
    calib_K_data[0][2] = _x_res/2;
    calib_K_data[1][2] = _y_res/2;
    calib_K_data[2][2] = 1;
    calib_x_res = _x_res;
    calib_y_res = _y_res;
}

bool Camera::LoadCalibXML(const char *calibfile) {
    TiXmlDocument document;
    if (!document.LoadFile(calibfile)) return false;
    TiXmlElement *xml_root = document.RootElement();

    return
        xml_root->QueryIntAttribute("width", &calib_x_res) == TIXML_SUCCESS &&
        xml_root->QueryIntAttribute("height", &calib_y_res) == TIXML_SUCCESS &&
        FileFormatUtils::parseXMLMatrix(xml_root->FirstChildElement("intrinsic_matrix"), &calib_K) &&
        FileFormatUtils::parseXMLMatrix(xml_root->FirstChildElement("distortion"), &calib_D);
}

bool Camera::LoadCalibOpenCV(const char *calibfile) {
    cvSetErrMode(CV_ErrModeSilent);
    CvFileStorage* fs = cvOpenFileStorage(calibfile, 0, CV_STORAGE_READ);
    cvSetErrMode(CV_ErrModeLeaf);
    if(fs){
        CvFileNode* root_node = cvGetRootFileNode(fs);
        // K Intrinsic
        CvFileNode* intrinsic_mat_node = cvGetFileNodeByName(fs, root_node, "intrinsic_matrix");
        CvMat* intrinsic_mat = reinterpret_cast<CvMat*>(cvRead(fs, intrinsic_mat_node));
        cvmSet(&calib_K, 0, 0, cvmGet(intrinsic_mat, 0, 0));
        cvmSet(&calib_K, 0, 1, cvmGet(intrinsic_mat, 0, 1));
        cvmSet(&calib_K, 0, 2, cvmGet(intrinsic_mat, 0, 2));
        cvmSet(&calib_K, 1, 0, cvmGet(intrinsic_mat, 1, 0));
        cvmSet(&calib_K, 1, 1, cvmGet(intrinsic_mat, 1, 1));
        cvmSet(&calib_K, 1, 2, cvmGet(intrinsic_mat, 1, 2));
        cvmSet(&calib_K, 2, 0, cvmGet(intrinsic_mat, 2, 0));
        cvmSet(&calib_K, 2, 1, cvmGet(intrinsic_mat, 2, 1));
        cvmSet(&calib_K, 2, 2, cvmGet(intrinsic_mat, 2, 2));

        // D Distortion
        CvFileNode* dist_mat_node = cvGetFileNodeByName(fs, root_node, "distortion");
        CvMat* dist_mat = reinterpret_cast<CvMat*>(cvRead(fs, dist_mat_node));
        cvmSet(&calib_D, 0, 0, cvmGet(dist_mat, 0, 0));
        cvmSet(&calib_D, 1, 0, cvmGet(dist_mat, 1, 0));
        cvmSet(&calib_D, 2, 0, cvmGet(dist_mat, 2, 0));
        cvmSet(&calib_D, 3, 0, cvmGet(dist_mat, 3, 0));

        // Resolution
        CvFileNode* width_node = cvGetFileNodeByName(fs, root_node, "width");
        CvFileNode* height_node = cvGetFileNodeByName(fs, root_node, "height");
        calib_x_res = width_node->data.i;
        calib_y_res = height_node->data.i;
        cvReleaseFileStorage(&fs);
        return true;
    }
    // reset error status
    cvSetErrStatus(CV_StsOk);
    return false;
}
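
// Illustrative sketch (added for this listing, not part of the original ALVAR
// source): LoadCalibOpenCV/SaveCalibOpenCV read and write a cvFileStorage
// document whose node names ("intrinsic_matrix", "distortion", "width",
// "height") come from the code above. The exact markup depends on the OpenCV
// version, but an XML file written by SaveCalibOpenCV would look roughly like:
//
//   <?xml version="1.0"?>
//   <opencv_storage>
//     <intrinsic_matrix type_id="opencv-matrix">
//       <rows>3</rows><cols>3</cols><dt>d</dt>
//       <data>550. 0. 320.  0. 550. 240.  0. 0. 1.</data>
//     </intrinsic_matrix>
//     <distortion type_id="opencv-matrix">
//       <rows>4</rows><cols>1</cols><dt>d</dt>
//       <data>0. 0. 0. 0.</data>
//     </distortion>
//     <width>640</width>
//     <height>480</height>
//   </opencv_storage>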

void Camera::SetCameraInfo(const sensor_msgs::CameraInfo &camInfo)
{
    cam_info_ = camInfo;

    calib_x_res = cam_info_.width;
    calib_y_res = cam_info_.height;
    x_res = calib_x_res;
    y_res = calib_y_res;

    cvmSet(&calib_K, 0, 0, cam_info_.K[0]);
    cvmSet(&calib_K, 0, 1, cam_info_.K[1]);
    cvmSet(&calib_K, 0, 2, cam_info_.K[2]);
    cvmSet(&calib_K, 1, 0, cam_info_.K[3]);
    cvmSet(&calib_K, 1, 1, cam_info_.K[4]);
    cvmSet(&calib_K, 1, 2, cam_info_.K[5]);
    cvmSet(&calib_K, 2, 0, cam_info_.K[6]);
    cvmSet(&calib_K, 2, 1, cam_info_.K[7]);
    cvmSet(&calib_K, 2, 2, cam_info_.K[8]);

    if (cam_info_.D.size() >= 4) {
        cvmSet(&calib_D, 0, 0, cam_info_.D[0]);
        cvmSet(&calib_D, 1, 0, cam_info_.D[1]);
        cvmSet(&calib_D, 2, 0, cam_info_.D[2]);
        cvmSet(&calib_D, 3, 0, cam_info_.D[3]);
    } else {
        cvmSet(&calib_D, 0, 0, 0);
        cvmSet(&calib_D, 1, 0, 0);
        cvmSet(&calib_D, 2, 0, 0);
        cvmSet(&calib_D, 3, 0, 0);
    }
}

void Camera::camInfoCallback (const sensor_msgs::CameraInfoConstPtr & cam_info)
{
    if (!getCamInfo_)
    {
        SetCameraInfo(*cam_info);
        getCamInfo_ = true;
        sub_.shutdown();
    }
}

bool Camera::SetCalib(const char *calibfile, int _x_res, int _y_res, FILE_FORMAT format) {
    x_res = _x_res;
    y_res = _y_res;
    if(!calibfile) return false;

    bool success = false;
    switch (format) {
        case FILE_FORMAT_XML:
            success = LoadCalibXML(calibfile);
            break;
        case FILE_FORMAT_OPENCV:
        case FILE_FORMAT_DEFAULT:
            success = LoadCalibOpenCV(calibfile);
            break;
        default:
            // TODO: throw exception?
            break;
    };

    if (success) {
        // Scale matrix in case of different resolution calibration.
        // The OpenCV documentation says:
        // - If an image from camera is up-sampled/down-sampled by some factor, all intrinsic camera parameters
        //   (fx, fy, cx and cy) should be scaled (multiplied/divided, respectively) by the same factor.
        // - The distortion coefficients remain the same regardless of the captured image resolution.
        if ((calib_x_res != x_res) || (calib_y_res != y_res)) {
            calib_K_data[0][0] *= (double(x_res)/double(calib_x_res));
            calib_K_data[0][2] *= (double(x_res)/double(calib_x_res));
            calib_K_data[1][1] *= (double(y_res)/double(calib_y_res));
            calib_K_data[1][2] *= (double(y_res)/double(calib_y_res));
        }
    }
    return success;
}
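
// Illustrative usage sketch (added for this listing, not part of the original
// ALVAR source): loading a stored calibration and rescaling it to the current
// capture resolution. The file name "calib.xml" and the 800x600 resolution are
// made-up example values.
//
//   alvar::Camera cam;
//   if (cam.SetCalib("calib.xml", 800, 600, alvar::FILE_FORMAT_XML)) {
//       // calib_K was loaded and scaled from the calibration resolution to 800x600
//   } else {
//       cam.SetSimpleCalib(800, 600, 1.0);  // fall back to a rough pinhole guess
//   }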

bool Camera::SaveCalibXML(const char *calibfile) {
    TiXmlDocument document;
    document.LinkEndChild(new TiXmlDeclaration("1.0", "UTF-8", "no"));
    document.LinkEndChild(new TiXmlElement("camera"));
    TiXmlElement *xml_root = document.RootElement();
    xml_root->SetAttribute("width", calib_x_res);
    xml_root->SetAttribute("height", calib_y_res);
    xml_root->LinkEndChild(FileFormatUtils::createXMLMatrix("intrinsic_matrix", &calib_K));
    xml_root->LinkEndChild(FileFormatUtils::createXMLMatrix("distortion", &calib_D));
    return document.SaveFile(calibfile);
}

bool Camera::SaveCalibOpenCV(const char *calibfile) {
    cvSetErrMode(CV_ErrModeSilent);
    CvFileStorage* fs = cvOpenFileStorage(calibfile, 0, CV_STORAGE_WRITE);
    cvSetErrMode(CV_ErrModeLeaf);
    if(fs){
        cvWrite(fs, "intrinsic_matrix", &calib_K, cvAttrList(0,0));
        cvWrite(fs, "distortion", &calib_D, cvAttrList(0,0));
        //cvWriteReal(fs, "fov_x", data.fov_x);
        //cvWriteReal(fs, "fov_y", data.fov_y);
        cvWriteInt(fs, "width", calib_x_res);
        cvWriteInt(fs, "height", calib_y_res);
        cvReleaseFileStorage(&fs);
        return true;
    }
    // reset error status
    cvSetErrStatus(CV_StsOk);
    return false;
}

bool Camera::SaveCalib(const char *calibfile, FILE_FORMAT format) {
    if(!calibfile)
        return false;

    switch (format) {
        case FILE_FORMAT_XML:
            return SaveCalibXML(calibfile);
        case FILE_FORMAT_OPENCV:
        case FILE_FORMAT_DEFAULT:
            return SaveCalibOpenCV(calibfile);
        default:
            return false;
    };
}

void Camera::Calibrate(ProjPoints &pp)
{
    CvMat *object_points = cvCreateMat((int)pp.object_points.size(), 1, CV_32FC3);
    CvMat *image_points = cvCreateMat((int)pp.image_points.size(), 1, CV_32FC2);
    const CvMat point_counts = cvMat((int)pp.point_counts.size(), 1, CV_32SC1, &pp.point_counts[0]);
    for (size_t i=0; i<pp.object_points.size(); i++) {
        object_points->data.fl[i*3+0] = (float)pp.object_points[i].x;
        object_points->data.fl[i*3+1] = (float)pp.object_points[i].y;
        object_points->data.fl[i*3+2] = (float)pp.object_points[i].z;
        image_points->data.fl[i*2+0]  = (float)pp.image_points[i].x;
        image_points->data.fl[i*2+1]  = (float)pp.image_points[i].y;
    }
    cvCalibrateCamera2(object_points, image_points, &point_counts,
        cvSize(pp.width, pp.height),
        &calib_K, &calib_D, 0, 0, CV_CALIB_USE_INTRINSIC_GUESS);

    calib_x_res = pp.width;
    calib_y_res = pp.height;

    cvReleaseMat(&object_points);
    cvReleaseMat(&image_points);
}
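
// Illustrative calibration sketch (added for this listing, not part of the
// original ALVAR source): collecting chessboard views with ProjPoints and
// fitting the intrinsics with Calibrate(). The 30-frame loop, the 6x8 board
// with 28 mm squares, and the output file name are made-up example values.
//
//   alvar::ProjPoints pp;
//   alvar::Camera cam;
//   for (int frame = 0; frame < 30; ++frame) {
//       IplImage *img = /* grab a frame from the capture source */;
//       pp.AddPointsUsingChessboard(img, 28.0, 6, 8, true);
//   }
//   cam.Calibrate(pp);                                    // runs cvCalibrateCamera2
//   cam.SaveCalib("calib.xml", alvar::FILE_FORMAT_XML);   // persist K and D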

void Camera::SetRes(int _x_res, int _y_res) {
    x_res = _x_res;
    y_res = _y_res;
    // Scale matrix in case of different resolution calibration.
    // The OpenCV documentation says:
    // - If an image from camera is up-sampled/down-sampled by some factor, all intrinsic camera parameters
    //   (fx, fy, cx and cy) should be scaled (multiplied/divided, respectively) by the same factor.
    // - The distortion coefficients remain the same regardless of the captured image resolution.
    if ((calib_x_res != x_res) || (calib_y_res != y_res)) {
        calib_K_data[0][0] *= (double(x_res)/double(calib_x_res));
        calib_K_data[0][2] *= (double(x_res)/double(calib_x_res));
        calib_K_data[1][1] *= (double(y_res)/double(calib_y_res));
        calib_K_data[1][2] *= (double(y_res)/double(calib_y_res));
    }
}
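
// Worked example of the scaling rule above (added for illustration, not part of
// the original ALVAR source): a calibration done at 640x480 with fx = 550 and
// cx = 320 that is reused at 320x240 gives a scale factor of 320/640 = 240/480
// = 0.5, so fx becomes 275 and cx becomes 160, while the distortion
// coefficients in calib_D are left untouched.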

// TODO: Better approach for this...
// Note, the proj_matrix[8] is now negated. This is due to the fact
// that with OpenCV and OpenGL projection matrices both y and z
// should be mirrored. All other components are used as is.
void Camera::GetOpenglProjectionMatrix(double proj_matrix[16], const int width, const int height, const float far_clip /*= 1000.0f*/, const float near_clip /*= 0.1f*/) {
    proj_matrix[0]  = 2.0f * calib_K_data[0][0] / float(width);
    proj_matrix[1]  = 0;
    proj_matrix[2]  = 0;
    proj_matrix[3]  = 0;
    proj_matrix[4]  = 2.0f * calib_K_data[0][1] / float(width); // skew
    proj_matrix[5]  = 2.0f * calib_K_data[1][1] / float(height);
    proj_matrix[6]  = 0;
    proj_matrix[7]  = 0;
    //proj_matrix[8] = (2.0f * calib_K_data[0][2] / float(width)) - 1.0f;
    proj_matrix[8]  = -(2.0f * calib_K_data[0][2] / float(width)) + 1.0f;
    proj_matrix[9]  = (2.0f * calib_K_data[1][2] / float(height)) - 1.0f;
    proj_matrix[10] = -(far_clip + near_clip)/(far_clip - near_clip);
    proj_matrix[11] = -1.0f;
    proj_matrix[12] = 0;
    proj_matrix[13] = 0;
    proj_matrix[14] = -2.0f * far_clip * near_clip / (far_clip - near_clip);
    proj_matrix[15] = 0;
}
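
// Illustrative usage sketch (added for this listing, not part of the original
// ALVAR source): feeding the matrix produced above to the fixed-function
// OpenGL pipeline. Assumes a valid OpenGL context, the GL headers, and a
// calibrated alvar::Camera named cam; the 640x480 viewport is a made-up value.
//
//   double proj[16];
//   cam.GetOpenglProjectionMatrix(proj, 640, 480);
//   glMatrixMode(GL_PROJECTION);
//   glLoadMatrixd(proj);   // the array is already in OpenGL column-major order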

void Camera::SetOpenglProjectionMatrix(double proj_matrix[16], const int width, const int height) {
    x_res = width;
    y_res = height;
    calib_x_res = width;
    calib_y_res = height;
    calib_K_data[0][0] = proj_matrix[0] * float(width) / 2.0f;
    calib_K_data[0][1] = proj_matrix[4] * float(width) / 2.0f;
    calib_K_data[1][1] = proj_matrix[5] * float(height) / 2.0f;
    //calib_K_data[0][2] = (proj_matrix[8] + 1.0f) * float(width) / 2.0f;
    calib_K_data[0][2] = (-proj_matrix[8] + 1.0f) * float(width) / 2.0f; // Is this ok?
    calib_K_data[1][2] = (proj_matrix[9] + 1.0f) * float(height) / 2.0f;
    calib_K_data[2][2] = 1;
}

void Camera::Undistort(PointDouble &point)
{
/*
    // focal length
    double ifx = 1./cvmGet(&calib_K, 0, 0);
    double ify = 1./cvmGet(&calib_K, 1, 1);

    // principal point
    double cx = cvmGet(&calib_K, 0, 2);
    double cy = cvmGet(&calib_K, 1, 2);

    // distortion coeffs
    double* k = calib_D.data.db;

    // compensate distortion iteratively
    double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y;
    for(int j = 0; j < 5; j++){
        double r2 = x*x + y*y;
        double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
        double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
        double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
        x = (x0 - delta_x)*icdist;
        y = (y0 - delta_y)*icdist;
    }
    // apply compensation
    point.x = x/ifx + cx;
    point.y = y/ify + cy;
*/
}

void Camera::Undistort(vector<PointDouble>& points)
{
/*
    // focal length
    double ifx = 1./cvmGet(&calib_K, 0, 0);
    double ify = 1./cvmGet(&calib_K, 1, 1);

    // principal point
    double cx = cvmGet(&calib_K, 0, 2);
    double cy = cvmGet(&calib_K, 1, 2);

    // distortion coeffs
    double* k = calib_D.data.db;

    for(unsigned int i = 0; i < points.size(); i++)
    {
        // compensate distortion iteratively
        double x = (points[i].x - cx)*ifx, y = (points[i].y - cy)*ify, x0 = x, y0 = y;
        for(int j = 0; j < 5; j++){
            double r2 = x*x + y*y;
            double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
            double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
            double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
            x = (x0 - delta_x)*icdist;
            y = (y0 - delta_y)*icdist;
        }
        // apply compensation
        points[i].x = x/ifx + cx;
        points[i].y = y/ify + cy;
    }
*/
}

void Camera::Undistort(CvPoint2D32f& point)
{
/*
    // focal length
    double ifx = 1./cvmGet(&calib_K, 0, 0);
    double ify = 1./cvmGet(&calib_K, 1, 1);

    // principal point
    double cx = cvmGet(&calib_K, 0, 2);
    double cy = cvmGet(&calib_K, 1, 2);

    // distortion coeffs
    double* k = calib_D.data.db;

    // compensate distortion iteratively
    double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y;
    for(int j = 0; j < 5; j++){
        double r2 = x*x + y*y;
        double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
        double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
        double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
        x = (x0 - delta_x)*icdist;
        y = (y0 - delta_y)*icdist;
    }
    // apply compensation
    point.x = float(x/ifx + cx);
    point.y = float(y/ify + cy);
*/
}

/*
    template<class PointType>
    void Undistort(PointType& point) {
        // focal length
        double ifx = 1./cvmGet(&calib_K, 0, 0);
        double ify = 1./cvmGet(&calib_K, 1, 1);

        // principal point
        double cx = cvmGet(&calib_K, 0, 2);
        double cy = cvmGet(&calib_K, 1, 2);

        // distortion coeffs
        double* k = calib_D.data.db;

        // compensate distortion iteratively
        double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y;
        for(int j = 0; j < 5; j++){
            double r2 = x*x + y*y;
            double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
            double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
            double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
            x = (x0 - delta_x)*icdist;
            y = (y0 - delta_y)*icdist;
        }
        // apply compensation
        point.x = x/ifx + cx;
        point.y = y/ify + cy;
    }
*/

void Camera::Distort(vector<PointDouble>& points)
{
/*
    double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy
    double fx = cvmGet(&calib_K, 0, 0), fy = cvmGet(&calib_K, 1, 1);
    double _fx = 1./fx, _fy = 1./fy;
    double* k = calib_D.data.db;

    double k1 = k[0], k2 = k[1];
    double p1 = k[2], p2 = k[3];

    for(unsigned int i = 0; i < points.size(); i++)
    {
        // Distort
        double y = (points[i].y - v0)*_fy;
        double y2 = y*y;
        double _2p1y = 2*p1*y;
        double _3p1y2 = 3*p1*y2;
        double p2y2 = p2*y2;

        double x = (points[i].x - u0)*_fx;
        double x2 = x*x;
        double r2 = x2 + y2;
        double d = 1 + (k1 + k2*r2)*r2;

        points[i].x = fx*(x*(d + _2p1y) + p2y2 + (3*p2)*x2) + u0;
        points[i].y = fy*(y*(d + (2*p2)*x) + _3p1y2 + p1*x2) + v0;
    }
*/
}

void Camera::Distort(PointDouble & point)
{
/*
    double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy
    double fx = cvmGet(&calib_K, 0, 0), fy = cvmGet(&calib_K, 1, 1);
    double _fx = 1./fx, _fy = 1./fy;
    double* k = calib_D.data.db;

    double k1 = k[0], k2 = k[1];
    double p1 = k[2], p2 = k[3];

    // Distort
    double y = (point.y - v0)*_fy;
    double y2 = y*y;
    double _2p1y = 2*p1*y;
    double _3p1y2 = 3*p1*y2;
    double p2y2 = p2*y2;

    double x = (point.x - u0)*_fx;
    double x2 = x*x;
    double r2 = x2 + y2;
    double d = 1 + (k1 + k2*r2)*r2;

    point.x = fx*(x*(d + _2p1y) + p2y2 + (3*p2)*x2) + u0;
    point.y = fy*(y*(d + (2*p2)*x) + _3p1y2 + p1*x2) + v0;
*/
}

void Camera::Distort(CvPoint2D32f & point)
{
/*
    double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy
    double fx = cvmGet(&calib_K, 0, 0), fy = cvmGet(&calib_K, 1, 1);
    double _fx = 1./fx, _fy = 1./fy;
    double* k = calib_D.data.db;

    double k1 = k[0], k2 = k[1];
    double p1 = k[2], p2 = k[3];

    // Distort
    double y = (point.y - v0)*_fy;
    double y2 = y*y;
    double _2p1y = 2*p1*y;
    double _3p1y2 = 3*p1*y2;
    double p2y2 = p2*y2;

    double x = (point.x - u0)*_fx;
    double x2 = x*x;
    double r2 = x2 + y2;
    double d = 1 + (k1 + k2*r2)*r2;

    point.x = float(fx*(x*(d + _2p1y) + p2y2 + (3*p2)*x2) + u0);
    point.y = float(fy*(y*(d + (2*p2)*x) + _3p1y2 + p1*x2) + v0);
*/
}

void Camera::CalcExteriorOrientation(vector<CvPoint3D64f>& pw, vector<CvPoint2D64f>& pi,
                                     Pose *pose)
{
    double ext_rodriques[3];
    double ext_translate[3];
    CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
    CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
    CvMat *object_points = cvCreateMat((int)pw.size(), 1, CV_32FC3);
    CvMat *image_points = cvCreateMat((int)pi.size(), 1, CV_32FC2);
    for (size_t i=0; i<pw.size(); i++) {
        object_points->data.fl[i*3+0] = (float)pw[i].x;
        object_points->data.fl[i*3+1] = (float)pw[i].y;
        object_points->data.fl[i*3+2] = (float)pw[i].z;
        image_points->data.fl[i*2+0]  = (float)pi[i].x;
        image_points->data.fl[i*2+1]  = (float)pi[i].y;
    }
    //cvFindExtrinsicCameraParams2(object_points, image_points, &calib_K, &calib_D, &ext_rodriques_mat, &ext_translate_mat);
    cvFindExtrinsicCameraParams2(object_points, image_points, &calib_K, NULL, &ext_rodriques_mat, &ext_translate_mat);
    pose->SetRodriques(&ext_rodriques_mat);
    pose->SetTranslation(&ext_translate_mat);
    cvReleaseMat(&object_points);
    cvReleaseMat(&image_points);
}

void Camera::CalcExteriorOrientation(vector<CvPoint3D64f>& pw, vector<PointDouble>& pi,
                                     CvMat *rodriques, CvMat *tra)
{
    //assert(pw.size() == pi.size());

    int size = (int)pi.size();

    CvPoint3D64f *world_pts = new CvPoint3D64f[size];
    CvPoint2D64f *image_pts = new CvPoint2D64f[size];

    for(int i = 0; i < size; i++){
        world_pts[i].x = pw[i].x;
        world_pts[i].y = pw[i].y;
        world_pts[i].z = pw[i].z;
        // flip image points! Why???
        //image_pts[i].x = x_res - pi[i].x;
        //image_pts[i].y = y_res - pi[i].y;
        image_pts[i].x = pi[i].x;
        image_pts[i].y = pi[i].y;
    }

    double rot[3]; // rotation vector
    CvMat world_mat, image_mat, rot_vec;
    cvInitMatHeader(&world_mat, size, 1, CV_64FC3, world_pts);
    cvInitMatHeader(&image_mat, size, 1, CV_64FC2, image_pts);
    cvInitMatHeader(&rot_vec, 3, 1, CV_64FC1, rot);

    cvZero(tra);
    //cvmodFindExtrinsicCameraParams2(&world_mat, &image_mat, &calib_K, &calib_D, rodriques, tra, error);
    cvFindExtrinsicCameraParams2(&world_mat, &image_mat, &calib_K, &calib_D, rodriques, tra);

    delete[] world_pts;
    delete[] image_pts;
}

void Camera::CalcExteriorOrientation(vector<PointDouble>& pw, vector<PointDouble>& pi,
                                     CvMat *rodriques, CvMat *tra)
{
    //assert(pw.size() == pi.size());
    int size = (int)pi.size();

    vector<CvPoint3D64f> pw3;
    pw3.resize(size);
    for (int i=0; i<size; i++) {
        pw3[i].x = pw[i].x;
        pw3[i].y = pw[i].y;
        pw3[i].z = 0;
    }

    CalcExteriorOrientation(pw3, pi, rodriques, tra);
}

void Camera::CalcExteriorOrientation(vector<PointDouble>& pw, vector<PointDouble>& pi, Pose *pose)
{
    double ext_rodriques[3];
    double ext_translate[3];
    CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
    CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
    CalcExteriorOrientation(pw, pi, &ext_rodriques_mat, &ext_translate_mat);
    pose->SetRodriques(&ext_rodriques_mat);
    pose->SetTranslation(&ext_translate_mat);
}

bool Camera::CalcExteriorOrientation(const CvMat* object_points, CvMat* image_points, CvMat *rodriques, CvMat *tra) {
    cvFindExtrinsicCameraParams2(object_points, image_points, &calib_K, &calib_D, rodriques, tra);
    return true;
}

bool Camera::CalcExteriorOrientation(const CvMat* object_points, CvMat* image_points, Pose *pose) {
    double ext_rodriques[3];
    double ext_translate[3];
    CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
    CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
    bool ret = CalcExteriorOrientation(object_points, image_points, &ext_rodriques_mat, &ext_translate_mat);
    pose->SetRodriques(&ext_rodriques_mat);
    pose->SetTranslation(&ext_translate_mat);
    return ret;
}
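
// Illustrative pose-estimation sketch (added for this listing, not part of the
// original ALVAR source): solving the exterior orientation of a square marker
// from its four image corners with a calibrated alvar::Camera named cam. The
// 40 mm marker edge and the corner layout are made-up example values; the
// image corners would normally come from a marker detector.
//
//   std::vector<alvar::PointDouble> corners_3d(4), corners_2d(4);
//   corners_3d[0].x = -20; corners_3d[0].y = -20;   // model corners in mm, z assumed 0
//   corners_3d[1].x =  20; corners_3d[1].y = -20;
//   corners_3d[2].x =  20; corners_3d[2].y =  20;
//   corners_3d[3].x = -20; corners_3d[3].y =  20;
//   // ... fill corners_2d with the detected pixel coordinates ...
//   alvar::Pose pose;
//   cam.CalcExteriorOrientation(corners_3d, corners_2d, &pose);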

void Camera::ProjectPoints(vector<CvPoint3D64f>& pw, Pose *pose, vector<CvPoint2D64f>& pi) const {
    double ext_rodriques[3];
    double ext_translate[3];
    CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
    CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
    pose->GetRodriques(&ext_rodriques_mat);
    pose->GetTranslation(&ext_translate_mat);
    CvMat *object_points = cvCreateMat((int)pw.size(), 1, CV_32FC3);
    CvMat *image_points = cvCreateMat((int)pi.size(), 1, CV_32FC2);
    for (size_t i=0; i<pw.size(); i++) {
        object_points->data.fl[i*3+0] = (float)pw[i].x;
        object_points->data.fl[i*3+1] = (float)pw[i].y;
        object_points->data.fl[i*3+2] = (float)pw[i].z;
    }
    cvProjectPoints2(object_points, &ext_rodriques_mat, &ext_translate_mat, &calib_K, &calib_D, image_points);
    for (size_t i=0; i<pw.size(); i++) {
        pi[i].x = image_points->data.fl[i*2+0];
        pi[i].y = image_points->data.fl[i*2+1];
    }
    cvReleaseMat(&object_points);
    cvReleaseMat(&image_points);
}

void Camera::ProjectPoints(const CvMat* object_points, const CvMat* rotation_vector,
                           const CvMat* translation_vector, CvMat* image_points) const
{
    // Project points
    cvProjectPoints2(object_points, rotation_vector, translation_vector, &calib_K, &calib_D, image_points);
}

void Camera::ProjectPoints(const CvMat* object_points, const Pose* pose, CvMat* image_points) const
{
    double ext_rodriques[3];
    double ext_translate[3];
    CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
    CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
    pose->GetRodriques(&ext_rodriques_mat);
    pose->GetTranslation(&ext_translate_mat);
    cvProjectPoints2(object_points, &ext_rodriques_mat, &ext_translate_mat, &calib_K, &calib_D, image_points);
}

void Camera::ProjectPoints(const CvMat* object_points, double gl[16], CvMat* image_points) const
{
    double glm[4][4] = {
        gl[0], gl[4], gl[8],  gl[12],
        gl[1], gl[5], gl[9],  gl[13],
        gl[2], gl[6], gl[10], gl[14],
        gl[3], gl[7], gl[11], gl[15],
    };
    CvMat glm_mat = cvMat(4, 4, CV_64F, glm);

    // For some reason we need to mirror both y and z ???
    double cv_mul_data[4][4];
    CvMat cv_mul = cvMat(4, 4, CV_64F, cv_mul_data);
    cvSetIdentity(&cv_mul);
    cvmSet(&cv_mul, 1, 1, -1);
    cvmSet(&cv_mul, 2, 2, -1);
    cvMatMul(&cv_mul, &glm_mat, &glm_mat);

    // Rotation
    Rotation r;
    r.SetMatrix(&glm_mat);
    double rod[3];
    CvMat rod_mat=cvMat(3, 1, CV_64F, rod);
    r.GetRodriques(&rod_mat);
    // Translation
    double tra[3] = { glm[0][3], glm[1][3], glm[2][3] };
    CvMat tra_mat = cvMat(3, 1, CV_64F, tra);
    // Project points
    ProjectPoints(object_points, &rod_mat, &tra_mat, image_points);
}

void Camera::ProjectPoint(const CvPoint3D64f pw, const Pose *pose, CvPoint2D64f &pi) const {
    float object_points_data[3] = {(float)pw.x, (float)pw.y, (float)pw.z};
    float image_points_data[2] = {0};
    CvMat object_points = cvMat(1, 1, CV_32FC3, object_points_data);
    CvMat image_points = cvMat(1, 1, CV_32FC2, image_points_data);
    ProjectPoints(&object_points, pose, &image_points);
    pi.x = image_points.data.fl[0];
    pi.y = image_points.data.fl[1];
}

void Camera::ProjectPoint(const CvPoint3D32f pw, const Pose *pose, CvPoint2D32f &pi) const {
    float object_points_data[3] = {(float)pw.x, (float)pw.y, (float)pw.z};
    float image_points_data[2] = {0};
    CvMat object_points = cvMat(1, 1, CV_32FC3, object_points_data);
    CvMat image_points = cvMat(1, 1, CV_32FC2, image_points_data);
    ProjectPoints(&object_points, pose, &image_points);
    pi.x = image_points.data.fl[0];
    pi.y = image_points.data.fl[1];
}

Homography::Homography() {
    cvInitMatHeader(&H, 3, 3, CV_64F, H_data);
}

void Homography::Find(const vector<PointDouble>& pw, const vector<PointDouble>& pi)
{
    assert(pw.size() == pi.size());
    int size = (int)pi.size();

    CvPoint2D64f *srcp = new CvPoint2D64f[size];
    CvPoint2D64f *dstp = new CvPoint2D64f[size];

    for(int i = 0; i < size; ++i){
        srcp[i].x = pw[i].x;
        srcp[i].y = pw[i].y;

        dstp[i].x = pi[i].x;
        dstp[i].y = pi[i].y;
    }

    CvMat src_pts, dst_pts;
    cvInitMatHeader(&dst_pts, 1, size, CV_64FC2, dstp);
    cvInitMatHeader(&src_pts, 1, size, CV_64FC2, srcp);

#ifdef OPENCV11
    cvFindHomography(&src_pts, &dst_pts, &H, 0, 0, 0);
#else
    cvFindHomography(&src_pts, &dst_pts, &H);
#endif

    delete[] srcp;
    delete[] dstp;
}

void Homography::ProjectPoints(const vector<PointDouble>& from, vector<PointDouble>& to)
{
    int size = (int)from.size();

    CvPoint3D64f *srcp = new CvPoint3D64f[size];

    for(int i = 0; i < size; ++i){
        srcp[i].x = from[i].x;
        srcp[i].y = from[i].y;
        srcp[i].z = 1;
    }

    CvPoint3D64f *dstp = new CvPoint3D64f[size];

    CvMat src_pts, dst_pts;
    cvInitMatHeader(&src_pts, 1, size, CV_64FC3, srcp);
    cvInitMatHeader(&dst_pts, 1, size, CV_64FC3, dstp);

    cvTransform(&src_pts, &dst_pts, &H);

    to.clear();
    for(int i = 0; i < size; ++i)
    {
        PointDouble pt;
        pt.x = dstp[i].x / dstp[i].z;
        pt.y = dstp[i].y / dstp[i].z;

        to.push_back(pt);
    }

    delete[] srcp;
    delete[] dstp;
}
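
// Illustrative usage sketch (added for this listing, not part of the original
// ALVAR source): mapping planar model points into the image with a homography
// estimated from point correspondences. model_pts and image_pts stand for
// caller-provided, already-matched point lists.
//
//   alvar::Homography h;
//   h.Find(model_pts, image_pts);             // fits H with cvFindHomography
//   std::vector<alvar::PointDouble> projected;
//   h.ProjectPoints(model_pts, projected);    // applies H and dehomogenizes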

} // namespace alvar

