TrackerOrientation.cpp
/*
 * This file is part of ALVAR, A Library for Virtual and Augmented Reality.
 *
 * Copyright 2007-2012 VTT Technical Research Centre of Finland
 *
 * Contact: VTT Augmented Reality Team <alvar.info@vtt.fi>
 *          <http://www.vtt.fi/multimedia/alvar.html>
 *
 * ALVAR is free software; you can redistribute it and/or modify it under the
 * terms of the GNU Lesser General Public License as published by the Free
 * Software Foundation; either version 2.1 of the License, or (at your option)
 * any later version.
 *
 * This library is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with ALVAR; if not, see
 * <http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html>.
 */

#include "TrackerOrientation.h"
#include "Optimization.h"

using namespace std;

namespace alvar {

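// Projection callback used by alvar::Optimization: project the current 3D
// model points with the rotation in 'state' (and zero translation) into
// image coordinates using the camera intrinsics and distortion coefficients.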
void TrackerOrientation::Project(CvMat* state, CvMat* projection, void *param)
{
    TrackerOrientation *tracker = (TrackerOrientation*)param;
    int count = projection->rows;
    CvMat rot_mat = cvMat(3, 1, CV_64F, &(state->data.db[0+0]));
    double zeros[3] = {0};
    CvMat zero_tra = cvMat(3, 1, CV_64F, zeros);
    cvReshape(projection, projection, 2, 1);
    cvProjectPoints2(tracker->_object_model, &rot_mat, &zero_tra,
                     &(tracker->_camera->calib_K), &(tracker->_camera->calib_D), projection);
    cvReshape(projection, projection, 1, count);
}

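// Reset the internal pose and mirror its y and z axes.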
void TrackerOrientation::Reset()
{
    _pose.Reset();
    _pose.Mirror(false, true, true);
}

// The estimated rotation is stored in the internal pose; the return value
// is currently unused.
double TrackerOrientation::Track(IplImage *image)
{
    UpdateRotationOnly(image, 0);
    return 0;
}

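// Estimate the rotation from the current 3D-2D feature correspondences using
// robust (Tukey-weighted) Levenberg-Marquardt optimization. Returns false if
// fewer than six usable correspondences are available.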
bool TrackerOrientation::UpdatePose(IplImage *image)
{
    int count_points = _F_v.size();
    if(count_points < 6) return false;

    CvMat* _M = cvCreateMat(count_points, 1, CV_64FC3);
    CvMat* image_observations = cvCreateMat(count_points*2, 1, CV_64F); // [u v u v u v ...]'

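    // Gather the 3D model points and their tracked 2D image observations for
    // every feature that can be used for pose estimation.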
    //map<int,Feature>::iterator it;
    int ind = 0;
    for(map<int,Feature>::iterator it=_F_v.begin(); it!=_F_v.end(); it++)
    {
        if((it->second.status3D  == Feature::USE_FOR_POSE ||
            it->second.status3D  == Feature::IS_INITIAL)  &&
            it->second.status2D  == Feature::IS_TRACKED)
        {
            _M->data.db[ind*3+0] = it->second.point3d.x;
            _M->data.db[ind*3+1] = it->second.point3d.y;
            _M->data.db[ind*3+2] = it->second.point3d.z;

            image_observations->data.db[ind*2+0] = it->second.point.x;
            image_observations->data.db[ind*2+1] = it->second.point.y;
            ind++;
        }
    }

    if(ind < 6)
    {
        cvReleaseMat(&image_observations);
        cvReleaseMat(&_M);
        return false;
    }

    double rot[3]; CvMat rotm = cvMat(3, 1, CV_64F, rot);
    _pose.GetRodriques(&rotm);

    CvMat* par = cvCreateMat(3, 1, CV_64F);
    memcpy(&(par->data.db[0+0]), rot, 3*sizeof(double));
    //par->data.db[3] = 0;

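    // Use only the first 'ind' valid correspondences; the matrices above were
    // allocated for all tracked features.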
    CvRect r; r.x = 0; r.y = 0; r.height = ind; r.width = 1;
    CvMat Msub;
    cvGetSubRect(_M, &Msub, r);
    _object_model = &Msub;

    r.height = 2*ind;
    CvMat image_observations_sub;
    cvGetSubRect(image_observations, &image_observations_sub, r);

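    // Optimize the three rotation parameters so that the projected model
    // points match the observed image points.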
    alvar::Optimization *opt = new alvar::Optimization(3, 2*ind);

    double foo = opt->Optimize(par, &image_observations_sub, 0.0005, 5,
                               Project, this, alvar::Optimization::TUKEY_LM);
    memcpy(rot, &(par->data.db[0+0]), 3*sizeof(double));
    _pose.SetRodriques(&rotm);

    delete opt;

    cvReleaseMat(&par);
    cvReleaseMat(&image_observations);
    cvReleaseMat(&_M);

    return true;
}

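// Track the features into the new frame and update the rotation-only pose.
// 'gray' must be a single-channel image; if 'image' is non-null, the feature
// states are drawn into it for debugging.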
bool TrackerOrientation::UpdateRotationOnly(IplImage *gray, IplImage *image)
{
    if(gray->nChannels != 1) return false;
    if(!_grsc) _grsc = cvCreateImage(cvSize(_xres, _yres), 8, 1);
    // Scale the input into the working resolution if it differs; otherwise
    // just alias its data to avoid a copy.
    if((gray->width != _grsc->width) || (gray->height != _grsc->height))
        cvResize(gray, _grsc);
    else
        _grsc->imageData = gray->imageData;

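    // Mark every feature as not tracked; features found again in this frame
    // are re-flagged below.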
    map<int,Feature>::iterator it;
    for(it = _F_v.begin(); it != _F_v.end(); it++)
        it->second.status2D = Feature::NOT_TRACKED;

    // Track features in image domain (distorted points)
    _ft.Track(_grsc);

    // Go through image features and match to previous (_F_v)
    for (int i = 0; i < _ft.feature_count; i++)
    {
        int id            = _ft.ids[i];
        _F_v[id].point    = _ft.features[i];
        _F_v[id].point.x *= _image_scale;
        _F_v[id].point.y *= _image_scale;
        _F_v[id].status2D = Feature::IS_TRACKED;

        // Throw away features that were previously flagged as outliers
        if(_F_v[id].status3D == Feature::IS_OUTLIER)
        {
            _ft.DelFeature(i);
        }
    }

    // Delete features that are not tracked
    it = _F_v.begin();
    while(it != _F_v.end())
    {
        if(it->second.status2D == Feature::NOT_TRACKED)
            _F_v.erase(it++);
        else ++it;
    }

    // Update pose based on current information
    UpdatePose();

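    // For each remaining feature: initialize a 3D point for new features,
    // optionally visualize its status, and flag features whose reprojection
    // error is too large as outliers.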
    it = _F_v.begin();
    while(it != _F_v.end())
    {
        Feature *f = &(it->second);

        // Add new points
        if(f->status3D == Feature::NONE)
        {
            double wx, wy, wz;

            CvPoint2D32f fpu = f->point;
            _camera->Undistort(fpu);

            // Here the depth z is fixed arbitrarily and x and y are computed
            // from it; it is unclear how z should really be chosen.
            int object_scale = 1; // TODO: should this match the pose scale?

            // inv(K)*[u v 1]'*scale
            wx =  object_scale*(fpu.x-_camera->calib_K_data[0][2])/_camera->calib_K_data[0][0];
            wy =  object_scale*(fpu.y-_camera->calib_K_data[1][2])/_camera->calib_K_data[1][1];
            wz =  object_scale;

            // The point is currently in the camera coordinate frame;
            // transform it into the model frame with the inverted pose.
            alvar::Pose p = _pose;
            p.Invert();

            double Xd[4] = {wx, wy, wz, 1};
            CvMat Xdm = cvMat(4, 1, CV_64F, Xd);
            double Pd[16];
            CvMat Pdm = cvMat(4, 4, CV_64F, Pd);
            p.GetMatrix(&Pdm);
            cvMatMul(&Pdm, &Xdm, &Xdm);
            f->point3d.x = Xd[0]/Xd[3];
            f->point3d.y = Xd[1]/Xd[3];
            f->point3d.z = Xd[2]/Xd[3];

            f->status3D = Feature::USE_FOR_POSE;
        }

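        // Visualize the feature status when a debug image is given:
        // red = new, green = used for pose, blue = initial, magenta = outlier.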
        if(image)
        {
            if(f->status3D == Feature::NONE)
                cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 3, CV_RGB(255,0,0), 1);
            else if(f->status3D == Feature::USE_FOR_POSE)
                cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 3, CV_RGB(0,255,0), 1);
            else if(f->status3D == Feature::IS_INITIAL)
                cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 3, CV_RGB(0,0,255), 1);
            else if(f->status3D == Feature::IS_OUTLIER)
                cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 2, CV_RGB(255,0,255), 1);
        }

        // Flag points whose reprojection error is too large as outliers.
        // Only the status is changed here; flagged features are removed on
        // the next frame. Note that this check is also run, unnecessarily,
        // on the points that were added just above.
        if( f->status3D == Feature::USE_FOR_POSE ||
            f->status3D == Feature::IS_INITIAL )
        {
            double p3d[3] = {f->point3d.x, f->point3d.y, f->point3d.z};
            CvMat p3dm = cvMat(1, 1, CV_64FC3, p3d);
            double p2d[2];
            CvMat p2dm = cvMat(2, 1, CV_64F, p2d);
            cvReshape(&p2dm, &p2dm, 2, 1);

            double gl_mat[16];
            _pose.GetMatrixGL(gl_mat);
            _camera->ProjectPoints(&p3dm, gl_mat, &p2dm);

            if(image)
                cvLine(image, cvPoint(int(p2d[0]), int(p2d[1])), cvPoint(int(f->point.x), int(f->point.y)), CV_RGB(255,0,255));

            double dist = (p2d[0]-f->point.x)*(p2d[0]-f->point.x)+(p2d[1]-f->point.y)*(p2d[1]-f->point.y);
            if(dist > _outlier_limit)
                f->status3D = Feature::IS_OUTLIER;
        }

        it++;
    }
    return true;
}

} // namespace alvar
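
For context, a minimal usage sketch follows. It assumes the constructor and the SetCamera()/GetPose() accessors declared in TrackerOrientation.h, as well as a hypothetical GrabGrayFrame() image source; consult the header for the exact signatures.

#include "TrackerOrientation.h"

// Hypothetical single-channel frame source; Track() expects a 1-channel image.
extern IplImage* GrabGrayFrame();

int main()
{
    alvar::Camera cam;
    cam.SetCalib("calib.xml", 640, 480);         // assumed calibration file

    alvar::TrackerOrientation tracker(640, 480); // constructor assumed from TrackerOrientation.h
    tracker.SetCamera(&cam);                     // assumed accessor
    tracker.Reset();

    while (IplImage *gray = GrabGrayFrame()) {
        tracker.Track(gray);                     // updates the internal rotation estimate

        alvar::Pose pose;
        tracker.GetPose(pose);                   // assumed accessor for the estimated pose
        double gl[16];
        pose.GetMatrixGL(gl);                    // e.g. as an OpenGL modelview matrix
    }
    return 0;
}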

