EC.cpp
Go to the documentation of this file.
/*
 * This file is part of ALVAR, A Library for Virtual and Augmented Reality.
 *
 * Copyright 2007-2012 VTT Technical Research Centre of Finland
 *
 * Contact: VTT Augmented Reality Team <alvar.info@vtt.fi>
 *          <http://www.vtt.fi/multimedia/alvar.html>
 *
 * ALVAR is free software; you can redistribute it and/or modify it under the
 * terms of the GNU Lesser General Public License as published by the Free
 * Software Foundation; either version 2.1 of the License, or (at your option)
 * any later version.
 *
 * This library is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with ALVAR; if not, see
 * <http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html>.
 */

#include "EC.h"
#include "Optimization.h"

namespace alvar {

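// Parameter block handed to the projection callbacks used by
// alvar::Optimization: the camera whose intrinsics and distortion are applied
// and the 3D model points that are re-projected at every iteration.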
struct ProjectParams
{
        Camera *camera;
        const CvMat *object_model;
};

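// Optimization callback: projects the model points using only the rotation
// stored in 'state' (a 3x1 Rodrigues vector) and a zero translation, writing
// the resulting 2D points into 'projection'.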
static void ProjectRot(CvMat* state, CvMat* projection, void* x)
{
        ProjectParams *foo = (ProjectParams*)x;
        Camera *camera = foo->camera;
        const CvMat *object_model = foo->object_model;
        int count = projection->rows;
        CvMat rot_mat = cvMat(3, 1, CV_64F, &(state->data.db[0+0]));
        double zeros[3] = {0};
        CvMat zero_tra = cvMat(3, 1, CV_64F, zeros);
        cvReshape(projection, projection, 2, 1);
        cvProjectPoints2(object_model, &rot_mat, &zero_tra, &(camera->calib_K), &(camera->calib_D), projection);
        cvReshape(projection, projection, 1, count);
}

// TODO: How does this differ from Camera::ProjectPoints?
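// Optimization callback for the full 6-DOF case: 'state' holds a 3x1 Rodrigues
// rotation followed by a 3x1 translation, and the model points are projected
// with both applied.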
static void Project(CvMat* state, CvMat* projection, void* x)
{
        ProjectParams *foo = (ProjectParams*)x;
        Camera *camera = foo->camera;
        const CvMat *object_model = foo->object_model;
        int count = projection->rows;
        CvMat rot_mat = cvMat(3, 1, CV_64F, &(state->data.db[0+0]));
        CvMat tra_mat = cvMat(3, 1, CV_64F, &(state->data.db[0+3]));
        cvReshape(projection, projection, 2, 1);
        cvProjectPoints2(object_model, &rot_mat, &tra_mat, &(camera->calib_K), &(camera->calib_D), projection);
        cvReshape(projection, projection, 1, count);
}

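// Refines the given Pose by minimizing the reprojection error of
// object_points against image_points: the pose is unpacked into Rodrigues
// rotation and translation vectors, optimized, and written back.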
bool CameraEC::UpdatePose(const CvMat* object_points, CvMat* image_points, Pose *pose, CvMat *weights) {
        double rot[3]; CvMat rotm = cvMat(3, 1, CV_64F, rot);
        double tra[3]; CvMat tram = cvMat(3, 1, CV_64F, tra);
        pose->GetRodriques(&rotm);
        pose->GetTranslation(&tram);
        bool ret = UpdatePose(object_points, image_points, &rotm, &tram, weights);
        pose->SetRodriques(&rotm);
        pose->SetTranslation(&tram);
        return ret;
}

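// Workhorse overload: packs rotation and translation into a 6x1 parameter
// vector and runs robust (Tukey-weighted) Levenberg-Marquardt against the
// reprojection residuals produced by Project(). Requires at least four point
// correspondences; optional per-point weights may be supplied.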
bool CameraEC::UpdatePose(const CvMat* object_points, CvMat* image_points, CvMat *rot, CvMat *tra, CvMat *weights) {
        if (object_points->height < 4) return false;
        /*      if (object_points->height < 6) {
                return false;
                // TODO: We need to change image_points into CV_32FC2
                return Camera::CalcExteriorOrientation(object_points, image_points, rot, tra);
        }*/
        CvMat* par = cvCreateMat(6, 1, CV_64F);
        memcpy(&(par->data.db[0+0]), rot->data.db, 3*sizeof(double));
        memcpy(&(par->data.db[0+3]), tra->data.db, 3*sizeof(double));

        ProjectParams pparams;
        pparams.camera = this;
        pparams.object_model = object_points;

        alvar::Optimization *opt = new alvar::Optimization(6, image_points->height);
        double tmp = opt->Optimize(par, image_points, 0.0005, 2, Project, &pparams, alvar::Optimization::TUKEY_LM, 0, 0, weights);

        memcpy(rot->data.db, &(par->data.db[0+0]), 3*sizeof(double));
        memcpy(tra->data.db, &(par->data.db[0+3]), 3*sizeof(double));

        delete opt;

        cvReleaseMat(&par);
        return true;
}

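// Usage sketch (hypothetical, not part of this file): with matching 3D model
// points and 2D observations and an initial estimate, e.g. from a marker
// detector, the pose could be refined roughly like this:
//
//   alvar::CameraEC cam;   // camera with calibration already loaded
//   alvar::Pose pose;      // initial pose estimate
//   cam.UpdatePose(object_points, image_points, &pose);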
bool CameraEC::UpdateRotation(const CvMat* object_points, CvMat* image_points, Pose *pose)
{
        double rot[3]; CvMat rotm = cvMat(3, 1, CV_64F, rot);
        double tra[3]; CvMat tram = cvMat(3, 1, CV_64F, tra);
        pose->GetRodriques(&rotm);
        pose->GetTranslation(&tram);
        bool ret = UpdateRotation(object_points, image_points, &rotm, &tram);
        pose->SetRodriques(&rotm);
        pose->SetTranslation(&tram);
        return ret;
}

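// Optimizes only the three rotation parameters using ProjectRot (which
// projects with a zero translation); note that the 'tra' argument is not used
// by the optimization in this overload.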
bool CameraEC::UpdateRotation(const CvMat* object_points, CvMat* image_points, CvMat *rot, CvMat *tra) {
        CvMat* par = cvCreateMat(3, 1, CV_64F);
        memcpy(&(par->data.db[0+0]), rot->data.db, 3*sizeof(double));
        ProjectParams pparams;
        pparams.camera = this;
        pparams.object_model = object_points;
        alvar::Optimization *opt = new alvar::Optimization(3, image_points->height);
        double tmp = opt->Optimize(par, image_points, 0.0005, 2, ProjectRot, &pparams, alvar::Optimization::TUKEY_LM);
        memcpy(rot->data.db, &(par->data.db[0+0]), 3*sizeof(double));
        delete opt;
        cvReleaseMat(&par);
        return true;
}

// Assumes that the mirror issue has been taken care of
void GetOrigo(Pose* pose, CvMat* O)
{
        pose->GetTranslation(O);
}

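// Returns a point on the viewing ray of image point u, expressed in the
// pose's target frame: P = R * inv(K) * [u.x u.y 1]^T + T, where R and T are
// taken from the given pose.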
void GetPointOnLine(const Pose* pose, Camera *camera, const CvPoint2D32f *u, CvMat* P)
{
        double kid[9], rotd[9], trad[3], ud[3] = {u->x, u->y, 1};
        CvMat Ki = cvMat(3, 3, CV_64F, kid);
        CvMat R = cvMat(3, 3, CV_64F, rotd);
        CvMat T = cvMat(3, 1, CV_64F, trad);
        CvMat U = cvMat(3, 1, CV_64F, ud);
        pose->GetMatrix(&R);
        pose->GetTranslation(&T);
        cvInv(&(camera->calib_K), &Ki);
        cvMatMul(&R, &Ki, &Ki);
        cvGEMM(&Ki, &U, 1, &T, 1, P, 0);
}

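// Midpoint triangulation: finds the point halfway between the closest points
// of the two rays o1->p1 and o2->p2. Returns false when the rays are nearly
// parallel, i.e. when the denominator D falls below 'limit'.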
bool MidPointAlgorithm(CvMat* o1, CvMat* o2, CvMat* p1, CvMat* p2, CvPoint3D32f& X, double limit)
{
        double ud[3], vd[3], wd[3];
        CvMat u = cvMat(3, 1, CV_64F, ud);
        CvMat v = cvMat(3, 1, CV_64F, vd);
        CvMat w = cvMat(3, 1, CV_64F, wd);

        cvSub(p1, o1, &u);
        cvSub(p2, o2, &v);
        cvSub(o1, o2, &w);

        double a = cvDotProduct(&u, &u);
        double b = cvDotProduct(&u, &v);
        double c = cvDotProduct(&v, &v);
        double d = cvDotProduct(&u, &w);
        double e = cvDotProduct(&v, &w);
        double D = a*c - b*b;
        double sc, tc;

        // compute the line parameters of the two closest points
        if (D < limit)
        {
                // The lines are almost parallel: give up instead of falling back
                // on the degenerate solution (sc = 0, tc = b>c ? d/b : e/c).
                return false;
        }
        else
        {
                sc = (b*e - c*d) / D;
                tc = (a*e - b*d) / D;
        }

        double m1d[3], m2d[3];
        CvMat m1 = cvMat(3, 1, CV_64F, m1d);
        CvMat m2 = cvMat(3, 1, CV_64F, m2d);
        cvAddWeighted(&u, sc, o1, 1.0, 0.0, &m1);
        cvAddWeighted(&v, tc, o2, 1.0, 0.0, &m2);
        cvAddWeighted(&m1, 0.5, &m2, 0.5, 0.0, &m1);

        X.x = (float)m1d[0];
        X.y = (float)m1d[1];
        X.z = (float)m1d[2];

        return true;
}

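// Triangulates a 3D point from two observations u1 and u2 of the same feature
// seen from pose1 and pose2: the poses are inverted to obtain the camera
// origins and viewing rays in world coordinates, which are then intersected
// with MidPointAlgorithm().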
// todo
bool CameraEC::ReconstructFeature(const Pose *pose1, const Pose *pose2, const CvPoint2D32f *u1, const CvPoint2D32f *u2, CvPoint3D32f *p3d, double limit) {
        double o1d[3], o2d[3], p1d[3], p2d[3];
        CvMat o1 = cvMat(3, 1, CV_64F, o1d);
        CvMat o2 = cvMat(3, 1, CV_64F, o2d);
        CvMat p1 = cvMat(3, 1, CV_64F, p1d);
        CvMat p2 = cvMat(3, 1, CV_64F, p2d);

        Pose po1 = *pose1; // Make copies so that we don't destroy the pose contents
        Pose po2 = *pose2;
        po1.Invert();
        po2.Invert();
        GetOrigo(&po1, &o1);
        GetOrigo(&po2, &o2);
        GetPointOnLine(&po1, this, u1, &p1);
        GetPointOnLine(&po2, this, u2, &p2);

        return MidPointAlgorithm(&o1, &o2, &p1, &p2, *p3d, limit);
}

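// Intersects the viewing ray of image point p2d with the z = 0 plane of the
// pose's coordinate frame: a 3x3 homography [r1 r2 t] is assembled from
// columns 0, 1 and 3 of the pose matrix, and its inverse is applied to the
// undistorted, intrinsics-normalized pixel to recover the planar point.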
void CameraEC::Get3dOnPlane(const Pose *pose, CvPoint2D32f p2d, CvPoint3D32f &p3d) {
        double pd[16], md[9], kd[9];
        CvMat P = cvMat(4, 4, CV_64F, pd);
        CvMat H = cvMat(3, 3, CV_64F, md);
        CvMat Ki = cvMat(3, 3, CV_64F, kd);

        pose->GetMatrix(&P);
        cvInv(&(calib_K), &Ki);

        // Construct homography from pose
        int ind_s = 0, ind_c = 0;
        for(int i = 0; i < 3; ++i)
        {
                CvRect r; r.x = ind_s; r.y = 0; r.height = 3; r.width = 1;
                CvMat sub = cvMat(3, 1, CV_64F);
                cvGetSubRect(&P, &sub, r);
                CvMat col = cvMat(3, 1, CV_64F);
                cvGetCol(&H, &col, ind_c);
                cvCopy(&sub, &col);
                ind_c++;
                ind_s++;
                if(i == 1) ind_s++;
        }

        // Apply H to get the 3D coordinates
        Camera::Undistort(p2d);
        double xd[3] = {p2d.x, p2d.y, 1};
        CvMat X = cvMat(3, 1, CV_64F, xd);
        cvMatMul(&Ki, &X, &X);
        cvInv(&H, &H);
        cvMatMul(&H, &X, &X);

        p3d.x = (float)(xd[0] / xd[2]);
        p3d.y = (float)(xd[1] / xd[2]);
        p3d.z = 0;
}

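// Back-projects the undistorted image point to a 3D point at the given depth
// in the camera frame (x = depth*(u-cx)/fx, y = depth*(v-cy)/fy, z = depth)
// and transforms it into the world frame with the inverted pose.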
void CameraEC::Get3dOnDepth(const Pose *pose, CvPoint2D32f p2d, float depth, CvPoint3D32f &p3d)
{
        double wx, wy, wz;
        Camera::Undistort(p2d);

        // Here z is set to infinity and x and y are computed somehow?!?
        //double object_scale = _dist_to_target; // TODO Same as the pose?!?!?!?

        // inv(K)*[u v 1]'*scale
        wx = depth*(p2d.x-calib_K_data[0][2])/calib_K_data[0][0];
        wy = depth*(p2d.y-calib_K_data[1][2])/calib_K_data[1][1];
        wz = depth;

        // Now the points are in the camera coordinate frame.
        alvar::Pose p = *pose;
        p.Invert();

        double Xd[4] = {wx, wy, wz, 1};
        CvMat Xdm = cvMat(4, 1, CV_64F, Xd);
        double Pd[16];
        CvMat Pdm = cvMat(4, 4, CV_64F, Pd);
        p.GetMatrix(&Pdm);
        cvMatMul(&Pdm, &Xdm, &Xdm);
        p3d.x = float(Xd[0]/Xd[3]);
        p3d.y = float(Xd[1]/Xd[3]);
        p3d.z = float(Xd[2]/Xd[3]);
}

} // namespace alvar


ar_track_alvar
Author(s): Scott Niekum
autogenerated on Thu Jun 6 2019 21:12:54