TrackerOrientation.cpp
/*
 * This file is part of ALVAR, A Library for Virtual and Augmented Reality.
 *
 * Copyright 2007-2012 VTT Technical Research Centre of Finland
 *
 * Contact: VTT Augmented Reality Team <alvar.info@vtt.fi>
 * <http://www.vtt.fi/multimedia/alvar.html>
 *
 * ALVAR is free software; you can redistribute it and/or modify it under the
 * terms of the GNU Lesser General Public License as published by the Free
 * Software Foundation; either version 2.1 of the License, or (at your option)
 * any later version.
 *
 * This library is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with ALVAR; if not, see
 * <http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html>.
 */

#include "TrackerOrientation.h"
#include "Optimization.h"

using namespace std;

namespace alvar {

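// Measurement model for the non-linear optimizer below: project the tracked
// 3D feature points using only the candidate rotation (a Rodrigues vector in
// 'state') and a zero translation. The cvReshape calls switch the projection
// matrix between the [u v u v ...]' layout used for the observation vector
// and the 2-channel point layout expected by cvProjectPoints2.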
void TrackerOrientation::Project(CvMat* state, CvMat* projection, void *param)
{
    TrackerOrientation *tracker = (TrackerOrientation*)param;
    int count = projection->rows;
    CvMat rot_mat = cvMat(3, 1, CV_64F, &(state->data.db[0+0]));
    double zeros[3] = {0};
    CvMat zero_tra = cvMat(3, 1, CV_64F, zeros);
    cvReshape(projection, projection, 2, 1);
    cvProjectPoints2(tracker->_object_model, &rot_mat, &zero_tra,
                     &(tracker->_camera->calib_K), &(tracker->_camera->calib_D),
                     projection);
    cvReshape(projection, projection, 1, count);
}

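// Reset the pose; Mirror(false, true, true) flips the y and z axes,
// presumably to convert between the camera convention and the convention
// used by the rest of ALVAR.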
void TrackerOrientation::Reset()
{
    _pose.Reset();
    _pose.Mirror(false, true, true);
}

// Pose difference is stored...
double TrackerOrientation::Track(IplImage *image)
{
    UpdateRotationOnly(image, 0);
    return 0;
}

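// Re-estimate the rotation from the currently tracked features: collect every
// feature that has both a usable 3D point and a valid 2D track, then refine
// the 3-parameter rotation by minimizing the reprojection error. Returns
// false when fewer than six usable correspondences are available.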
bool TrackerOrientation::UpdatePose(IplImage *image)
{
    int count_points = _F_v.size();
    if(count_points < 6) return false;

    CvMat* _M = cvCreateMat(count_points, 1, CV_64FC3);
    CvMat* image_observations = cvCreateMat(count_points*2, 1, CV_64F); // [u v u v u v ...]'

    int ind = 0;
    for(map<int,Feature>::iterator it = _F_v.begin(); it != _F_v.end(); it++)
    {
        if((it->second.status3D == Feature::USE_FOR_POSE ||
            it->second.status3D == Feature::IS_INITIAL) &&
            it->second.status2D == Feature::IS_TRACKED)
        {
            _M->data.db[ind*3+0] = it->second.point3d.x;
            _M->data.db[ind*3+1] = it->second.point3d.y;
            _M->data.db[ind*3+2] = it->second.point3d.z;

            image_observations->data.db[ind*2+0] = it->second.point.x;
            image_observations->data.db[ind*2+1] = it->second.point.y;
            ind++;
        }
    }

    if(ind < 6)
    {
        cvReleaseMat(&image_observations);
        cvReleaseMat(&_M);
        return false;
    }

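    // Use the rotation of the current pose (as a Rodrigues vector) as the
    // initial guess for the optimization parameters.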
    double rot[3]; CvMat rotm = cvMat(3, 1, CV_64F, rot);
    _pose.GetRodriques(&rotm);

    CvMat* par = cvCreateMat(3, 1, CV_64F);
    memcpy(&(par->data.db[0+0]), rot, 3*sizeof(double));
    //par->data.db[3] = 0;

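    // Only the first 'ind' entries were filled above, so restrict the model
    // points and the observation vector to sub-matrices of that size.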
    CvRect r; r.x = 0; r.y = 0; r.height = ind; r.width = 1;
    CvMat Msub;
    cvGetSubRect(_M, &Msub, r);
    _object_model = &Msub;

    r.height = 2*ind;
    CvMat image_observations_sub;
    cvGetSubRect(image_observations, &image_observations_sub, r);

    alvar::Optimization *opt = new alvar::Optimization(3, 2*ind);

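    // Optimize the three rotation parameters so that Project() reproduces the
    // 2D observations; TUKEY_LM is Levenberg-Marquardt combined with a Tukey
    // M-estimator that down-weights outlier correspondences.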
    opt->Optimize(par, &image_observations_sub, 0.0005, 5, Project, this, alvar::Optimization::TUKEY_LM);
    memcpy(rot, &(par->data.db[0+0]), 3*sizeof(double));
    _pose.SetRodriques(&rotm);

    delete opt;

    cvReleaseMat(&par);
    cvReleaseMat(&image_observations);
    cvReleaseMat(&_M);

    return true;
}

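// Per-frame update: track features in the grayscale image, drop features that
// were lost, refine the rotation with UpdatePose(), back-project newly seen
// features to 3D at unit depth in front of the camera, and mark features with
// a large reprojection error as outliers. If 'image' is given, debug graphics
// are drawn into it.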
bool TrackerOrientation::UpdateRotationOnly(IplImage *gray, IplImage *image)
{
    if(gray->nChannels != 1) return false;
    if(!_grsc) _grsc = cvCreateImage(cvSize(_xres, _yres), 8, 1);
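    // _grsc was created at (_xres, _yres) above, so in practice the else
    // branch aliases the input image data instead of copying it.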
    if((_xres != _grsc->width) || (_yres != _grsc->height))
        cvResize(gray, _grsc);
    else
        _grsc->imageData = gray->imageData;

    map<int,Feature>::iterator it;
    for(it = _F_v.begin(); it != _F_v.end(); it++)
        it->second.status2D = Feature::NOT_TRACKED;

    // Track features in image domain (distorted points)
    _ft.Track(_grsc);

    // Go through image features and match to previous (_F_v)
    for (int i = 0; i < _ft.feature_count; i++)
    {
        int id = _ft.ids[i];
        _F_v[id].point = _ft.features[i];
        _F_v[id].point.x *= _image_scale;
        _F_v[id].point.y *= _image_scale;
        _F_v[id].status2D = Feature::IS_TRACKED;

        // Throw outliers away
        if(_F_v[id].status3D == Feature::IS_OUTLIER)
        {
            _ft.DelFeature(i);
        }
    }

    // Delete features that are not tracked
    it = _F_v.begin();
    while(it != _F_v.end())
    {
        if(it->second.status2D == Feature::NOT_TRACKED)
            _F_v.erase(it++);
        else ++it;
    }

    // Update pose based on current information
    UpdatePose();

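    // Second pass over the features: give brand-new features an initial 3D
    // point, draw optional debug markers, and flag features with a large
    // reprojection error as outliers.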
    it = _F_v.begin();
    while(it != _F_v.end())
    {
        Feature *f = &(it->second);

        // Add new points
        if(f->status3D == Feature::NONE)
        {
            double wx, wy, wz;

            CvPoint2D32f fpu = f->point;
            _camera->Undistort(fpu);

            // Here z is set to inf and x and y are computed somehow?!?
            int object_scale = 1; // TODO Same as the pose?!?!?!?

            // inv(K)*[u v 1]'*scale
            wx = object_scale*(fpu.x-_camera->calib_K_data[0][2])/_camera->calib_K_data[0][0];
            wy = object_scale*(fpu.y-_camera->calib_K_data[1][2])/_camera->calib_K_data[1][1];
            wz = object_scale;

            // Now the points are in camera coordinate frame.
            alvar::Pose p = _pose;
            p.Invert();

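            // Transform the back-projected point from the camera frame into
            // the model frame with the inverted pose, so that UpdatePose()
            // can later re-project it with the current rotation estimate.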
            double Xd[4] = {wx, wy, wz, 1};
            CvMat Xdm = cvMat(4, 1, CV_64F, Xd);
            double Pd[16];
            CvMat Pdm = cvMat(4, 4, CV_64F, Pd);
            p.GetMatrix(&Pdm);
            cvMatMul(&Pdm, &Xdm, &Xdm);
            f->point3d.x = Xd[0]/Xd[3];
            f->point3d.y = Xd[1]/Xd[3];
            f->point3d.z = Xd[2]/Xd[3];
            //cout<<f->point3d.z<<endl;

            f->status3D = Feature::USE_FOR_POSE;
        }

        if(image)
        {
            if(f->status3D == Feature::NONE)
                cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 3, CV_RGB(255,0,0), 1);
            else if(f->status3D == Feature::USE_FOR_POSE)
                cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 3, CV_RGB(0,255,0), 1);
            else if(f->status3D == Feature::IS_INITIAL)
                cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 3, CV_RGB(0,0,255), 1);
            else if(f->status3D == Feature::IS_OUTLIER)
                cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 2, CV_RGB(255,0,255), 1);
        }

        // Flag points whose back-projection error is too big
        // OK, here we just change the state...
        // Right now this is also done needlessly for the points that were just added above...
        if( f->status3D == Feature::USE_FOR_POSE ||
            f->status3D == Feature::IS_INITIAL )
        {
            double p3d[3] = {f->point3d.x, f->point3d.y, f->point3d.z};
            CvMat p3dm = cvMat(1, 1, CV_64FC3, p3d);
            double p2d[2];
            CvMat p2dm = cvMat(2, 1, CV_64F, p2d);
            cvReshape(&p2dm, &p2dm, 2, 1);

            double gl_mat[16];
            _pose.GetMatrixGL(gl_mat);
            _camera->ProjectPoints(&p3dm, gl_mat, &p2dm);

            if(image)
                cvLine(image, cvPoint(int(p2d[0]), int(p2d[1])), cvPoint(int(f->point.x),int(f->point.y)), CV_RGB(255,0,255));

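            // Squared pixel distance between the reprojected point and the
            // tracked point; note that _outlier_limit is therefore compared
            // against a squared distance.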
            double dist = (p2d[0]-f->point.x)*(p2d[0]-f->point.x)+(p2d[1]-f->point.y)*(p2d[1]-f->point.y);
            if(dist > _outlier_limit)
                f->status3D = Feature::IS_OUTLIER;
        }

        it++;
    }
    return true;
}

} // namespace alvar