Camera.cpp
1 /*
2  * This file is part of ALVAR, A Library for Virtual and Augmented Reality.
3  *
4  * Copyright 2007-2012 VTT Technical Research Centre of Finland
5  *
6  * Contact: VTT Augmented Reality Team <alvar.info@vtt.fi>
7  * <http://www.vtt.fi/multimedia/alvar.html>
8  *
9  * ALVAR is free software; you can redistribute it and/or modify it under the
10  * terms of the GNU Lesser General Public License as published by the Free
11  * Software Foundation; either version 2.1 of the License, or (at your option)
12  * any later version.
13  *
14  * This library is distributed in the hope that it will be useful, but WITHOUT
15  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16  * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
17  * for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public License
20  * along with ALVAR; if not, see
21  * <http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html>.
22  */
23 
24 #include "ar_track_alvar/Alvar.h"
25 #include "ar_track_alvar/Camera.h"
26 #include "ar_track_alvar/FileFormatUtils.h"
27 #include <memory>
28 
29 using namespace std;
30 
31 namespace alvar {
32 using namespace std;
33 
34 
35 void ProjPoints::Reset() {
36  object_points.clear();
37  image_points.clear();
38  point_counts.clear();
39 }
40 
41 // TODO: Does it matter what the etalon_square_size is???
42 bool ProjPoints::AddPointsUsingChessboard(IplImage *image, double etalon_square_size, int etalon_rows, int etalon_columns, bool visualize) {
43  if (image->width == 0) return false;
44  IplImage *gray = cvCreateImage(cvSize(image->width, image->height), IPL_DEPTH_8U, 1);
45  CvPoint2D32f *corners = new CvPoint2D32f[etalon_rows*etalon_columns];
46  if (image->nChannels == 1)
47  cvCopy(image, gray);
48  else
49  cvCvtColor(image, gray, CV_RGB2GRAY);
50  width = image->width;
51  height = image->height;
52 
53  int point_count = 0;
54  int pattern_was_found = cvFindChessboardCorners(gray, cvSize(etalon_rows, etalon_columns), corners, &point_count);
55  if (!pattern_was_found) point_count=0;
56  if (point_count > 0) {
57  cvFindCornerSubPix(gray, corners, point_count, cvSize(5,5), cvSize(-1,-1),
58  cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 10, 0.01f));
59  for (int i=0; i<point_count; i++) {
60  CvPoint3D64f po;
61  CvPoint2D64f pi;
62  po.x = etalon_square_size*(i%etalon_rows);
63  po.y = etalon_square_size*(i/etalon_rows);
64  po.z = 0;
65  pi.x = corners[i].x;
66  pi.y = corners[i].y;
67  object_points.push_back(po);
68  image_points.push_back(pi);
69  }
70  point_counts.push_back(point_count);
71  }
72  if (visualize) {
73  cvDrawChessboardCorners(image, cvSize(etalon_rows, etalon_columns),
74  corners, point_count, false /*pattern_was_found*/);
75  }
76  delete [] corners;
77  cvReleaseImage(&gray);
78  if (point_count > 0) return true;
79  return false;
80 }
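// Note on the corner indexing above: corner i is placed on the z = 0 plane at
// (etalon_square_size*(i%etalon_rows), etalon_square_size*(i/etalon_rows), 0).
// Worked example: with etalon_rows = 6 and etalon_square_size = 30 (mm), corner
// i = 7 maps to (30, 30, 0). Regarding the TODO above: the square size only fixes
// the metric scale of the object points, so it scales the translations recovered
// from this data; the estimated intrinsics and distortion are not affected by it.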
81 
82 bool ProjPoints::AddPointsUsingMarkers(vector<PointDouble> &marker_corners, vector<PointDouble> &marker_corners_img, IplImage* image)
83 {
84  width = image->width;
85  height = image->height;
86  if ((marker_corners.size() == marker_corners_img.size()) &&
87  (marker_corners.size() == 4))
88  {
89  for (size_t p=0; p<marker_corners.size(); p++) {
90  CvPoint3D64f po;
91  CvPoint2D64f pi;
92  po.x = marker_corners[p].x;
93  po.y = marker_corners[p].y;
94  po.z = 0;
95  pi.x = marker_corners_img[p].x;
96  pi.y = marker_corners_img[p].y;
97  object_points.push_back(po);
98  image_points.push_back(pi);
99  }
100  point_counts.push_back(marker_corners.size());
101  }
102 
103  return true;
104 }
105 
106 Camera::Camera() {
107  calib_K = cvMat(3, 3, CV_64F, calib_K_data);
108  calib_D = cvMat(4, 1, CV_64F, calib_D_data);
109  memset(calib_K_data,0,sizeof(double)*3*3);
110  memset(calib_D_data,0,sizeof(double)*4);
111  calib_K_data[0][0] = 550; // Just some focal length by default
112  calib_K_data[1][1] = 550; // Just some focal length by default
113  calib_K_data[0][2] = 320;
114  calib_K_data[1][2] = 240;
115  calib_K_data[2][2] = 1;
116  calib_x_res = 640;
117  calib_y_res = 480;
118  x_res = 640;
119  y_res = 480;
120 }
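// Note on the defaults above: fx = fy = 550 at 640x480 with the principal point at
// the image centre implies a horizontal field of view of roughly
// 2*atan(320/550), i.e. about 60 degrees -- a placeholder until a real calibration is loaded.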
121 
122 
123 Camera::Camera(ros::NodeHandle & n, std::string cam_info_topic):n_(n)
124 {
125  calib_K = cvMat(3, 3, CV_64F, calib_K_data);
126  calib_D = cvMat(4, 1, CV_64F, calib_D_data);
127  memset(calib_K_data,0,sizeof(double)*3*3);
128  memset(calib_D_data,0,sizeof(double)*4);
129  calib_K_data[0][0] = 550; // Just some focal length by default
130  calib_K_data[1][1] = 550; // Just some focal length by default
131  calib_K_data[0][2] = 320;
132  calib_K_data[1][2] = 240;
133  calib_K_data[2][2] = 1;
134  calib_x_res = 640;
135  calib_y_res = 480;
136  x_res = 640;
137  y_res = 480;
138  cameraInfoTopic_ = cam_info_topic;
139  ROS_INFO ("Subscribing to info topic");
140  sub_ = n_.subscribe(cameraInfoTopic_, 1, &Camera::camInfoCallback, this);
141  getCamInfo_ = false;
142 }
143 
144 
145 
146 //
147 //Camera::Camera(int w, int h) {
148 // calib_K = cvMat(3, 3, CV_64F, calib_K_data);
149 // calib_D = cvMat(4, 1, CV_64F, calib_D_data);
150 // memset(calib_K_data,0,sizeof(double)*3*3);
151 // memset(calib_D_data,0,sizeof(double)*4);
152 // calib_K_data[0][0] = w/2;
153 // calib_K_data[1][1] = w/2;
154 // calib_K_data[0][2] = w/2;
155 // calib_K_data[1][2] = h/2;
156 // calib_K_data[2][2] = 1;
157 // calib_x_res = w;
158 // calib_y_res = h;
159 // x_res = w;
160 // y_res = h;
161 //}
162 
163 void Camera::SetSimpleCalib(int _x_res, int _y_res, double f_fac)
164 {
165  memset(calib_K_data,0,sizeof(double)*3*3);
166  memset(calib_D_data,0,sizeof(double)*4);
167  calib_K_data[0][0] = _x_res*f_fac; // Just some focal length by default
168  calib_K_data[1][1] = _x_res*f_fac; // Just some focal length by default
169  calib_K_data[0][2] = _x_res/2;
170  calib_K_data[1][2] = _y_res/2;
171  calib_K_data[2][2] = 1;
172  calib_x_res = _x_res;
173  calib_y_res = _y_res;
174 }
175 
176 bool Camera::LoadCalibXML(const char *calibfile) {
177  TiXmlDocument document;
178  if (!document.LoadFile(calibfile)) return false;
179  TiXmlElement *xml_root = document.RootElement();
180 
181  return
182  xml_root->QueryIntAttribute("width", &calib_x_res) == TIXML_SUCCESS &&
183  xml_root->QueryIntAttribute("height", &calib_y_res) == TIXML_SUCCESS &&
184  FileFormatUtils::parseXMLMatrix(xml_root->FirstChildElement("intrinsic_matrix"), &calib_K) &&
185  FileFormatUtils::parseXMLMatrix(xml_root->FirstChildElement("distortion"), &calib_D);
186 }
187 
188 bool Camera::LoadCalibOpenCV(const char *calibfile) {
189  cvSetErrMode(CV_ErrModeSilent);
190  CvFileStorage* fs = cvOpenFileStorage(calibfile, 0, CV_STORAGE_READ);
191  cvSetErrMode(CV_ErrModeLeaf);
192  if(fs){
193  CvFileNode* root_node = cvGetRootFileNode(fs);
194  // K Intrinsic
195  CvFileNode* intrinsic_mat_node = cvGetFileNodeByName(fs, root_node, "intrinsic_matrix");
196  CvMat* intrinsic_mat = reinterpret_cast<CvMat*>(cvRead(fs, intrinsic_mat_node));
197  cvmSet(&calib_K, 0, 0, cvmGet(intrinsic_mat, 0, 0));
198  cvmSet(&calib_K, 0, 1, cvmGet(intrinsic_mat, 0, 1));
199  cvmSet(&calib_K, 0, 2, cvmGet(intrinsic_mat, 0, 2));
200  cvmSet(&calib_K, 1, 0, cvmGet(intrinsic_mat, 1, 0));
201  cvmSet(&calib_K, 1, 1, cvmGet(intrinsic_mat, 1, 1));
202  cvmSet(&calib_K, 1, 2, cvmGet(intrinsic_mat, 1, 2));
203  cvmSet(&calib_K, 2, 0, cvmGet(intrinsic_mat, 2, 0));
204  cvmSet(&calib_K, 2, 1, cvmGet(intrinsic_mat, 2, 1));
205  cvmSet(&calib_K, 2, 2, cvmGet(intrinsic_mat, 2, 2));
206 
207  // D Distortion
208  CvFileNode* dist_mat_node = cvGetFileNodeByName(fs, root_node, "distortion");
209  CvMat* dist_mat = reinterpret_cast<CvMat*>(cvRead(fs, dist_mat_node));
210  cvmSet(&calib_D, 0, 0, cvmGet(dist_mat, 0, 0));
211  cvmSet(&calib_D, 1, 0, cvmGet(dist_mat, 1, 0));
212  cvmSet(&calib_D, 2, 0, cvmGet(dist_mat, 2, 0));
213  cvmSet(&calib_D, 3, 0, cvmGet(dist_mat, 3, 0));
214 
215  // Resolution
216  CvFileNode* width_node = cvGetFileNodeByName(fs, root_node, "width");
217  CvFileNode* height_node = cvGetFileNodeByName(fs, root_node, "height");
218  calib_x_res = width_node->data.i;
219  calib_y_res = height_node->data.i;
220  cvReleaseFileStorage(&fs);
221  return true;
222  }
223  // reset error status
224  cvSetErrStatus(CV_StsOk);
225  return false;
226 }
227 
228 void Camera::SetCameraInfo(const sensor_msgs::CameraInfo &camInfo)
229 {
230  cam_info_ = camInfo;
231 
232  calib_x_res = cam_info_.width;
233  calib_y_res = cam_info_.height;
234  x_res = calib_x_res;
235  y_res = calib_y_res;
236 
237  cvmSet(&calib_K, 0, 0, cam_info_.K[0]);
238  cvmSet(&calib_K, 0, 1, cam_info_.K[1]);
239  cvmSet(&calib_K, 0, 2, cam_info_.K[2]);
240  cvmSet(&calib_K, 1, 0, cam_info_.K[3]);
241  cvmSet(&calib_K, 1, 1, cam_info_.K[4]);
242  cvmSet(&calib_K, 1, 2, cam_info_.K[5]);
243  cvmSet(&calib_K, 2, 0, cam_info_.K[6]);
244  cvmSet(&calib_K, 2, 1, cam_info_.K[7]);
245  cvmSet(&calib_K, 2, 2, cam_info_.K[8]);
246 
247  if (cam_info_.D.size() >= 4) {
248  cvmSet(&calib_D, 0, 0, cam_info_.D[0]);
249  cvmSet(&calib_D, 1, 0, cam_info_.D[1]);
250  cvmSet(&calib_D, 2, 0, cam_info_.D[2]);
251  cvmSet(&calib_D, 3, 0, cam_info_.D[3]);
252  } else {
253  cvmSet(&calib_D, 0, 0, 0);
254  cvmSet(&calib_D, 1, 0, 0);
255  cvmSet(&calib_D, 2, 0, 0);
256  cvmSet(&calib_D, 3, 0, 0);
257  }
258 }
259 
260 void Camera::camInfoCallback (const sensor_msgs::CameraInfoConstPtr & cam_info)
261  {
262  if (!getCamInfo_)
263  {
264  SetCameraInfo(*cam_info);
265  getCamInfo_ = true;
266  sub_.shutdown();
267  }
268  }
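// Usage sketch for the ROS constructor and this callback -- a minimal example, with
// "/camera/camera_info" standing in for whatever CameraInfo topic is actually used,
// and assuming getCamInfo_ is accessible to the caller:
/*
    ros::NodeHandle nh;
    alvar::Camera cam(nh, "/camera/camera_info");
    ros::Rate rate(10);
    while (ros::ok() && !cam.getCamInfo_) {  // wait for the first CameraInfo message
        ros::spinOnce();
        rate.sleep();
    }
    // calib_K / calib_D now hold the values delivered by the camera driver.
*/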
269 
270 bool Camera::SetCalib(const char *calibfile, int _x_res, int _y_res, FILE_FORMAT format) {
271  x_res = _x_res;
272  y_res = _y_res;
273  if(!calibfile) return false;
274 
275  bool success = false;
276  switch (format) {
277  case FILE_FORMAT_XML:
278  success = LoadCalibXML(calibfile);
279  break;
280  case FILE_FORMAT_OPENCV:
281  case FILE_FORMAT_DEFAULT:
282  success = LoadCalibOpenCV(calibfile);
283  break;
284  default:
285  // TODO: throw exception?
286  break;
287  };
288 
289  if (success) {
290  // Scale matrix in case of different resolution calibration.
291  // The OpenCV documentation says:
292  // - If an image from camera is up-sampled/down-sampled by some factor, all intrinsic camera parameters
293  // (fx, fy, cx and cy) should be scaled (multiplied/divided, respectively) by the same factor.
294  // - The distortion coefficients remain the same regardless of the captured image resolution.
295  if ((calib_x_res != x_res) || (calib_y_res != y_res)) {
296  calib_K_data[0][0] *= (double(x_res)/double(calib_x_res));
297  calib_K_data[0][2] *= (double(x_res)/double(calib_x_res));
298  calib_K_data[1][1] *= (double(y_res)/double(calib_y_res));
299  calib_K_data[1][2] *= (double(y_res)/double(calib_y_res));
300  }
301  }
302  return success;
303 }
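// Worked example of the scaling above: a calibration stored at 640x480 with
// fx = 550 and cx = 320, loaded with _x_res = 320 and _y_res = 240, becomes
// fx = 275 and cx = 160 (fy and cy are halved the same way); calib_D is untouched.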
304 
305 bool Camera::SaveCalibXML(const char *calibfile) {
306  TiXmlDocument document;
307  document.LinkEndChild(new TiXmlDeclaration("1.0", "UTF-8", "no"));
308  document.LinkEndChild(new TiXmlElement("camera"));
309  TiXmlElement *xml_root = document.RootElement();
310  xml_root->SetAttribute("width", calib_x_res);
311  xml_root->SetAttribute("height", calib_y_res);
312  xml_root->LinkEndChild(FileFormatUtils::createXMLMatrix("intrinsic_matrix", &calib_K));
313  xml_root->LinkEndChild(FileFormatUtils::createXMLMatrix("distortion", &calib_D));
314  return document.SaveFile(calibfile);
315 }
316 
317 bool Camera::SaveCalibOpenCV(const char *calibfile) {
318  cvSetErrMode(CV_ErrModeSilent);
319  CvFileStorage* fs = cvOpenFileStorage(calibfile, 0, CV_STORAGE_WRITE);
320  cvSetErrMode(CV_ErrModeLeaf);
321  if(fs){
322  cvWrite(fs, "intrinsic_matrix", &calib_K, cvAttrList(0,0));
323  cvWrite(fs, "distortion", &calib_D, cvAttrList(0,0));
324  //cvWriteReal(fs, "fov_x", data.fov_x);
325  //cvWriteReal(fs, "fov_y", data.fov_y);
326  cvWriteInt(fs, "width", calib_x_res);
327  cvWriteInt(fs, "height", calib_y_res);
328  cvReleaseFileStorage(&fs);
329  return true;
330  }
331  // reset error status
332  cvSetErrStatus(CV_StsOk);
333  return false;
334 }
335 
336 bool Camera::SaveCalib(const char *calibfile, FILE_FORMAT format) {
337  if(!calibfile)
338  return false;
339 
340  switch (format) {
341  case FILE_FORMAT_XML:
342  return SaveCalibXML(calibfile);
343  case FILE_FORMAT_OPENCV:
344  case FILE_FORMAT_DEFAULT:
345  return SaveCalibOpenCV(calibfile);
346  default:
347  return false;
348  };
349 }
350 
351 void Camera::Calibrate(ProjPoints &pp)
352 {
353  CvMat *object_points = cvCreateMat((int)pp.object_points.size(), 1, CV_32FC3);
354  CvMat *image_points = cvCreateMat((int)pp.image_points.size(), 1, CV_32FC2);
355  const CvMat point_counts = cvMat((int)pp.point_counts.size(), 1, CV_32SC1, &pp.point_counts[0]);
356  for (size_t i=0; i<pp.object_points.size(); i++) {
357  object_points->data.fl[i*3+0] = (float)pp.object_points[i].x;
358  object_points->data.fl[i*3+1] = (float)pp.object_points[i].y;
359  object_points->data.fl[i*3+2] = (float)pp.object_points[i].z;
360  image_points->data.fl[i*2+0] = (float)pp.image_points[i].x;
361  image_points->data.fl[i*2+1] = (float)pp.image_points[i].y;
362  }
363  cvCalibrateCamera2(object_points, image_points, &point_counts,
364  cvSize(pp.width, pp.height),
365  &calib_K, &calib_D, 0, 0, CV_CALIB_USE_INTRINSIC_GUESS);
366 
367  calib_x_res = pp.width;
368  calib_y_res = pp.height;
369 
370  cvReleaseMat(&object_points);
371  cvReleaseMat(&image_points);
372 }
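// Usage sketch for Calibrate(): collect chessboard detections over a number of
// frames and then solve for the intrinsics. A minimal example assuming an OpenCV
// capture device; the 8x6 pattern and 30 mm square size are example values only.
/*
    alvar::ProjPoints pp;
    alvar::Camera cam;
    CvCapture *cap = cvCaptureFromCAM(0);
    for (int frame = 0; frame < 30; ++frame) {
        IplImage *img = cvQueryFrame(cap);   // frame buffer owned by the capture
        if (!img) break;
        pp.AddPointsUsingChessboard(img, 30.0, 8, 6, false);
    }
    cam.Calibrate(pp);
    cam.SaveCalib("calib.xml", alvar::FILE_FORMAT_XML);
    cvReleaseCapture(&cap);
*/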
373 
374 void Camera::SetRes(int _x_res, int _y_res) {
375  x_res = _x_res;
376  y_res = _y_res;
377  // Scale matrix in case of different resolution calibration.
378  // The OpenCV documentation says:
379  // - If an image from camera is up-sampled/down-sampled by some factor, all intrinsic camera parameters
380  // (fx, fy, cx and cy) should be scaled (multiplied/divided, respectively) by the same factor.
381  // - The distortion coefficients remain the same regardless of the captured image resolution.
382  if ((calib_x_res != x_res) || (calib_y_res != y_res)) {
383  calib_K_data[0][0] *= (double(x_res)/double(calib_x_res));
384  calib_K_data[0][2] *= (double(x_res)/double(calib_x_res));
385  calib_K_data[1][1] *= (double(y_res)/double(calib_y_res));
386  calib_K_data[1][2] *= (double(y_res)/double(calib_y_res));
387  }
388 }
389 
390 // TODO: Better approach for this...
391 // Note, the proj_matrix[8] is now negated. This is due to the fact
392 // that with OpenCV and OpenGL projection matrices both y and z
393 // should be mirrored. All other components are left unchanged.
394 void Camera::GetOpenglProjectionMatrix(double proj_matrix[16], const int width, const int height, const float far_clip /*= 1000.0f*/, const float near_clip /*= 0.1f*/) {
395  proj_matrix[0] = 2.0f * calib_K_data[0][0] / float(width);
396  proj_matrix[1] = 0;
397  proj_matrix[2] = 0;
398  proj_matrix[3] = 0;
399  proj_matrix[4] = 2.0f * calib_K_data[0][1] / float(width); // skew
400  proj_matrix[5] = 2.0f * calib_K_data[1][1] / float(height);
401  proj_matrix[6] = 0;
402  proj_matrix[7] = 0;
403  //proj_matrix[8] = (2.0f * calib_K_data[0][2] / float(width)) - 1.0f;
404  proj_matrix[8] = -(2.0f * calib_K_data[0][2] / float(width)) + 1.0f;
405  proj_matrix[9] = (2.0f * calib_K_data[1][2] / float(height)) - 1.0f;
406  proj_matrix[10] = -(far_clip + near_clip)/(far_clip - near_clip);
407  proj_matrix[11] = -1.0f;
408  proj_matrix[12] = 0;
409  proj_matrix[13] = 0;
410  proj_matrix[14] = -2.0f * far_clip * near_clip / (far_clip - near_clip);
411  proj_matrix[15] = 0;
412 }
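// The indexing above fills the matrix in the column-major order OpenGL expects
// (the -1 written to index 11 lands in the bottom row of the third column), so the
// result can normally be handed straight to the fixed-function pipeline. A minimal
// sketch, assuming 'cam' is an alvar::Camera with its resolution already set:
/*
    double proj[16];
    cam.GetOpenglProjectionMatrix(proj, cam.x_res, cam.y_res);
    glMatrixMode(GL_PROJECTION);
    glLoadMatrixd(proj);   // takes a column-major double[16]
*/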
413 
414 void Camera::SetOpenglProjectionMatrix(double proj_matrix[16], const int width, const int height) {
415  x_res = width;
416  y_res = height;
417  calib_x_res = width;
418  calib_y_res = height;
419  calib_K_data[0][0] = proj_matrix[0] * float(width) / 2.0f;
420  calib_K_data[0][1] = proj_matrix[4] * float(width) / 2.0f;
421  calib_K_data[1][1] = proj_matrix[5] * float(height) / 2.0f;
422  //calib_K_data[0][2] = (proj_matrix[8] + 1.0f) * float(width) / 2.0f;
423  calib_K_data[0][2] = (-proj_matrix[8] + 1.0f) * float(width) / 2.0f; // Is this ok?
424  calib_K_data[1][2] = (proj_matrix[9] + 1.0f) * float(height) / 2.0f;
425  calib_K_data[2][2] = 1;
426 }
427 
428 void Camera::Undistort(PointDouble& point)
429 {
430 /*
431  // focal length
432  double ifx = 1./cvmGet(&calib_K, 0, 0);
433  double ify = 1./cvmGet(&calib_K, 1, 1);
434 
435  // principal point
436  double cx = cvmGet(&calib_K, 0, 2);
437  double cy = cvmGet(&calib_K, 1, 2);
438 
439  // distortion coeffs
440  double* k = calib_D.data.db;
441 
442  // compensate distortion iteratively
443  double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y;
444  for(int j = 0; j < 5; j++){
445  double r2 = x*x + y*y;
446  double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
447  double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
448  double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
449  x = (x0 - delta_x)*icdist;
450  y = (y0 - delta_y)*icdist;
451  }
452  // apply compensation
453  point.x = x/ifx + cx;
454  point.y = y/ify + cy;
455 */
456 }
457 
458 void Camera::Undistort(vector<PointDouble >& points)
459 {
460 /*
461  // focal length
462  double ifx = 1./cvmGet(&calib_K, 0, 0);
463  double ify = 1./cvmGet(&calib_K, 1, 1);
464 
465  // principal point
466  double cx = cvmGet(&calib_K, 0, 2);
467  double cy = cvmGet(&calib_K, 1, 2);
468 
469  // distortion coeffs
470  double* k = calib_D.data.db;
471 
472  for(unsigned int i = 0; i < points.size(); i++)
473  {
474  // compensate distortion iteratively
475  double x = (points[i].x - cx)*ifx, y = (points[i].y - cy)*ify, x0 = x, y0 = y;
476  for(int j = 0; j < 5; j++){
477  double r2 = x*x + y*y;
478  double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
479  double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
480  double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
481  x = (x0 - delta_x)*icdist;
482  y = (y0 - delta_y)*icdist;
483  }
484  // apply compensation
485  points[i].x = x/ifx + cx;
486  points[i].y = y/ify + cy;
487  }
488 */
489 }
490 
491 void Camera::Undistort(CvPoint2D32f& point)
492 {
493 /*
494  // focal length
495  double ifx = 1./cvmGet(&calib_K, 0, 0);
496  double ify = 1./cvmGet(&calib_K, 1, 1);
497 
498  // principal point
499  double cx = cvmGet(&calib_K, 0, 2);
500  double cy = cvmGet(&calib_K, 1, 2);
501 
502  // distortion coeffs
503  double* k = calib_D.data.db;
504 
505 
506  // compensate distortion iteratively
507  double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y;
508  for(int j = 0; j < 5; j++){
509  double r2 = x*x + y*y;
510  double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
511  double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
512  double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
513  x = (x0 - delta_x)*icdist;
514  y = (y0 - delta_y)*icdist;
515  }
516  // apply compensation
517  point.x = float(x/ifx + cx);
518  point.y = float(y/ify + cy);
519 */
520 }
521 
522 /*
523  template<class PointType>
524  void Undistort(PointType& point) {
525  // focal length
526  double ifx = 1./cvmGet(&calib_K, 0, 0);
527  double ify = 1./cvmGet(&calib_K, 1, 1);
528 
529  // principal point
530  double cx = cvmGet(&calib_K, 0, 2);
531  double cy = cvmGet(&calib_K, 1, 2);
532 
533  // distortion coeffs
534  double* k = calib_D.data.db;
535 
536  // compensate distortion iteratively
537  double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y;
538  for(int j = 0; j < 5; j++){
539  double r2 = x*x + y*y;
540  double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
541  double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
542  double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
543  x = (x0 - delta_x)*icdist;
544  y = (y0 - delta_y)*icdist;
545  }
546  // apply compensation
547  point.x = x/ifx + cx;
548  point.y = y/ify + cy;
549  }
550 */
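// The commented-out Undistort() bodies above and the Distort() bodies below follow
// the standard 4-coefficient OpenCV model, with calib_D holding (k1, k2, p1, p2)
// and (x, y) in normalized image coordinates:
//
//   r^2 = x^2 + y^2
//   x_d = x*(1 + k1*r^2 + k2*r^4) + 2*p1*x*y + p2*(r^2 + 2*x^2)
//   y_d = y*(1 + k1*r^2 + k2*r^4) + p1*(r^2 + 2*y^2) + 2*p2*x*y
//
// Undistortion has no closed form, so the code above inverts this with a few
// fixed-point iterations: divide out the radial factor and subtract the tangential
// terms, starting from the distorted coordinates.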
551 
552 void Camera::Distort(vector<PointDouble>& points)
553 {
554 /*
555  double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy
556  double fx = cvmGet(&calib_K, 0, 0), fy = cvmGet(&calib_K, 1, 1);
557  double _fx = 1./fx, _fy = 1./fy;
558  double* k = calib_D.data.db;
559 
560  double k1 = k[0], k2 = k[1];
561  double p1 = k[2], p2 = k[3];
562 
563  for(unsigned int i = 0; i < points.size(); i++)
564  {
565  // Distort
566  double y = (points[i].y - v0)*_fy;
567  double y2 = y*y;
568  double _2p1y = 2*p1*y;
569  double _3p1y2 = 3*p1*y2;
570  double p2y2 = p2*y2;
571 
572  double x = (points[i].x - u0)*_fx;
573  double x2 = x*x;
574  double r2 = x2 + y2;
575  double d = 1 + (k1 + k2*r2)*r2;
576 
577  points[i].x = fx*(x*(d + _2p1y) + p2y2 + (3*p2)*x2) + u0;
578  points[i].y = fy*(y*(d + (2*p2)*x) + _3p1y2 + p1*x2) + v0;
579  }
580 */
581 }
582 
583 void Camera::Distort(PointDouble& point)
584 {
585 /*
586  double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy
587  double fx = cvmGet(&calib_K, 0, 0), fy = cvmGet(&calib_K, 1, 1);
588  double _fx = 1./fx, _fy = 1./fy;
589  double* k = calib_D.data.db;
590 
591  double k1 = k[0], k2 = k[1];
592  double p1 = k[2], p2 = k[3];
593 
594  // Distort
595  double y = (point.y - v0)*_fy;
596  double y2 = y*y;
597  double _2p1y = 2*p1*y;
598  double _3p1y2 = 3*p1*y2;
599  double p2y2 = p2*y2;
600 
601  double x = (point.x - u0)*_fx;
602  double x2 = x*x;
603  double r2 = x2 + y2;
604  double d = 1 + (k1 + k2*r2)*r2;
605 
606  point.x = fx*(x*(d + _2p1y) + p2y2 + (3*p2)*x2) + u0;
607  point.y = fy*(y*(d + (2*p2)*x) + _3p1y2 + p1*x2) + v0;
608 */
609 }
610 
611 void Camera::Distort(CvPoint2D32f & point)
612 {
613 /*
614  double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy
615  double fx = cvmGet(&calib_K, 0, 0), fy = cvmGet(&calib_K, 1, 1);
616  double _fx = 1./fx, _fy = 1./fy;
617  double* k = calib_D.data.db;
618 
619  double k1 = k[0], k2 = k[1];
620  double p1 = k[2], p2 = k[3];
621 
622  // Distort
623  double y = (point.y - v0)*_fy;
624  double y2 = y*y;
625  double _2p1y = 2*p1*y;
626  double _3p1y2 = 3*p1*y2;
627  double p2y2 = p2*y2;
628 
629  double x = (point.x - u0)*_fx;
630  double x2 = x*x;
631  double r2 = x2 + y2;
632  double d = 1 + (k1 + k2*r2)*r2;
633 
634  point.x = float(fx*(x*(d + _2p1y) + p2y2 + (3*p2)*x2) + u0);
635  point.y = float(fy*(y*(d + (2*p2)*x) + _3p1y2 + p1*x2) + v0);
636 */
637 }
638 
639 void Camera::CalcExteriorOrientation(vector<CvPoint3D64f>& pw, vector<CvPoint2D64f>& pi,
640  Pose *pose)
641 {
642  double ext_rodriques[3];
643  double ext_translate[3];
644  CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
645  CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
646  CvMat *object_points = cvCreateMat((int)pw.size(), 1, CV_32FC3);
647  CvMat *image_points = cvCreateMat((int)pi.size(), 1, CV_32FC2);
648  for (size_t i=0; i<pw.size(); i++) {
649  object_points->data.fl[i*3+0] = (float)pw[i].x;
650  object_points->data.fl[i*3+1] = (float)pw[i].y;
651  object_points->data.fl[i*3+2] = (float)pw[i].z;
652  image_points->data.fl[i*2+0] = (float)pi[i].x;
653  image_points->data.fl[i*2+1] = (float)pi[i].y;
654  }
655  //cvFindExtrinsicCameraParams2(object_points, image_points, &calib_K, &calib_D, &ext_rodriques_mat, &ext_translate_mat);
656  cvFindExtrinsicCameraParams2(object_points, image_points, &calib_K, NULL, &ext_rodriques_mat, &ext_translate_mat);
657  pose->SetRodriques(&ext_rodriques_mat);
658  pose->SetTranslation(&ext_translate_mat);
659  cvReleaseMat(&object_points);
660  cvReleaseMat(&image_points);
661 }
662 
663 void Camera::CalcExteriorOrientation(vector<CvPoint3D64f>& pw, vector<PointDouble >& pi,
664  CvMat *rodriques, CvMat *tra)
665 {
666  //assert(pw.size() == pi.size());
667 
668  int size = (int)pi.size();
669 
670  CvPoint3D64f *world_pts = new CvPoint3D64f[size];
671  CvPoint2D64f *image_pts = new CvPoint2D64f[size];
672 
673  for(int i = 0; i < size; i++){
674  world_pts[i].x = pw[i].x;
675  world_pts[i].y = pw[i].y;
676  world_pts[i].z = pw[i].z;
677  // flip image points! Why???
678  //image_pts[i].x = x_res - pi[i].x;
679  //image_pts[i].y = y_res - pi[i].y;
680  image_pts[i].x = pi[i].x;
681  image_pts[i].y = pi[i].y;
682  }
683 
684  double rot[3]; // rotation vector
685  CvMat world_mat, image_mat, rot_vec;
686  cvInitMatHeader(&world_mat, size, 1, CV_64FC3, world_pts);
687  cvInitMatHeader(&image_mat, size, 1, CV_64FC2, image_pts);
688  cvInitMatHeader(&rot_vec, 3, 1, CV_64FC1, rot);
689 
690  cvZero(tra);
691  //cvmodFindExtrinsicCameraParams2(&world_mat, &image_mat, &calib_K, &calib_D, rodriques, tra, error);
692  cvFindExtrinsicCameraParams2(&world_mat, &image_mat, &calib_K, &calib_D, rodriques, tra);
693 
694  delete[] world_pts;
695  delete[] image_pts;
696 }
697 
698 void Camera::CalcExteriorOrientation(vector<PointDouble >& pw, vector<PointDouble >& pi,
699  CvMat *rodriques, CvMat *tra)
700 {
701  //assert(pw.size() == pi.size());
702  int size = (int)pi.size();
703 
704  vector<CvPoint3D64f> pw3;
705  pw3.resize(size);
706  for (int i=0; i<size; i++) {
707  pw3[i].x = pw[i].x;
708  pw3[i].y = pw[i].y;
709  pw3[i].z = 0;
710  }
711 
712  CalcExteriorOrientation(pw3, pi, rodriques, tra);
713 }
714 
715 void Camera::CalcExteriorOrientation(vector<PointDouble>& pw, vector<PointDouble >& pi, Pose *pose)
716 {
717  double ext_rodriques[3];
718  double ext_translate[3];
719  CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
720  CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
721  CalcExteriorOrientation(pw, pi, &ext_rodriques_mat, &ext_translate_mat);
722  pose->SetRodriques(&ext_rodriques_mat);
723  pose->SetTranslation(&ext_translate_mat);
724 }
725 
726 bool Camera::CalcExteriorOrientation(const CvMat* object_points, CvMat* image_points, CvMat *rodriques, CvMat *tra) {
727  cvFindExtrinsicCameraParams2(object_points, image_points, &calib_K, &calib_D, rodriques, tra);
728  return true;
729 }
730 
731 bool Camera::CalcExteriorOrientation(const CvMat* object_points, CvMat* image_points, Pose *pose) {
732  double ext_rodriques[3];
733  double ext_translate[3];
734  CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
735  CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
736  bool ret = CalcExteriorOrientation(object_points, image_points, &ext_rodriques_mat, &ext_translate_mat);
737  pose->SetRodriques(&ext_rodriques_mat);
738  pose->SetTranslation(&ext_translate_mat);
739  return ret;
740 }
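// Usage sketch for the Pose-returning overload: exterior orientation of a planar
// square from its four corners. The 100 mm side length is an example value, and the
// image points are assumed to come from a corner detector.
/*
    std::vector<alvar::PointDouble> pw(4), pi(4);
    pw[1].x = 100;                   // (0,0), (100,0), (100,100), (0,100) on z = 0
    pw[2].x = 100; pw[2].y = 100;
    pw[3].y = 100;
    // pi[0..3] = the corresponding detected image positions, in pixels
    alvar::Pose pose;
    cam.CalcExteriorOrientation(pw, pi, &pose);   // cam: a calibrated alvar::Camera
*/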
741 
742 void Camera::ProjectPoints(vector<CvPoint3D64f>& pw, Pose *pose, vector<CvPoint2D64f>& pi) const {
743  double ext_rodriques[3];
744  double ext_translate[3];
745  CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
746  CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
747  pose->GetRodriques(&ext_rodriques_mat);
748  pose->GetTranslation(&ext_translate_mat);
749  CvMat *object_points = cvCreateMat((int)pw.size(), 1, CV_32FC3);
750  CvMat *image_points = cvCreateMat((int)pi.size(), 1, CV_32FC2);
751  for (size_t i=0; i<pw.size(); i++) {
752  object_points->data.fl[i*3+0] = (float)pw[i].x;
753  object_points->data.fl[i*3+1] = (float)pw[i].y;
754  object_points->data.fl[i*3+2] = (float)pw[i].z;
755  }
756  cvProjectPoints2(object_points, &ext_rodriques_mat, &ext_translate_mat, &calib_K, &calib_D, image_points);
757  for (size_t i=0; i<pw.size(); i++) {
758  pi[i].x = image_points->data.fl[i*2+0];
759  pi[i].y = image_points->data.fl[i*2+1];
760  }
761  cvReleaseMat(&object_points);
762  cvReleaseMat(&image_points);
763 }
764 
765 void Camera::ProjectPoints(const CvMat* object_points, const CvMat* rotation_vector,
766  const CvMat* translation_vector, CvMat* image_points) const
767 {
768  // Project points
769  cvProjectPoints2(object_points, rotation_vector, translation_vector, &calib_K, &calib_D, image_points);
770 }
771 
772 void Camera::ProjectPoints(const CvMat* object_points, const Pose* pose, CvMat* image_points) const
773 {
774  double ext_rodriques[3];
775  double ext_translate[3];
776  CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
777  CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
778  pose->GetRodriques(&ext_rodriques_mat);
779  pose->GetTranslation(&ext_translate_mat);
780  cvProjectPoints2(object_points, &ext_rodriques_mat, &ext_translate_mat, &calib_K, &calib_D, image_points);
781 }
782 
783 void Camera::ProjectPoints(const CvMat* object_points, double gl[16], CvMat* image_points) const
784 {
785  double glm[4][4] = {
786  gl[0], gl[4], gl[8], gl[12],
787  gl[1], gl[5], gl[9], gl[13],
788  gl[2], gl[6], gl[10], gl[14],
789  gl[3], gl[7], gl[11], gl[15],
790  };
791  CvMat glm_mat = cvMat(4, 4, CV_64F, glm);
792 
793  // Mirror y and z: OpenCV cameras look down +z with y down, OpenGL looks down -z with y up.
794  double cv_mul_data[4][4];
795  CvMat cv_mul = cvMat(4, 4, CV_64F, cv_mul_data);
796  cvSetIdentity(&cv_mul);
797  cvmSet(&cv_mul, 1, 1, -1);
798  cvmSet(&cv_mul, 2, 2, -1);
799  cvMatMul(&cv_mul, &glm_mat, &glm_mat);
800 
801  // Rotation
802  Rotation r;
803  r.SetMatrix(&glm_mat);
804  double rod[3];
805  CvMat rod_mat=cvMat(3, 1, CV_64F, rod);
806  r.GetRodriques(&rod_mat);
807  // Translation
808  double tra[3] = { glm[0][3], glm[1][3], glm[2][3] };
809  CvMat tra_mat = cvMat(3, 1, CV_64F, tra);
810  // Project points
811  ProjectPoints(object_points, &rod_mat, &tra_mat, image_points);
812 }
813 
814 void Camera::ProjectPoint(const CvPoint3D64f pw, const Pose *pose, CvPoint2D64f &pi) const {
815  float object_points_data[3] = {(float)pw.x, (float)pw.y, (float)pw.z};
816  float image_points_data[2] = {0};
817  CvMat object_points = cvMat(1, 1, CV_32FC3, object_points_data);
818  CvMat image_points = cvMat(1, 1, CV_32FC2, image_points_data);
819  ProjectPoints(&object_points, pose, &image_points);
820  pi.x = image_points.data.fl[0];
821  pi.y = image_points.data.fl[1];
822 }
823 
824 void Camera::ProjectPoint(const CvPoint3D32f pw, const Pose *pose, CvPoint2D32f &pi) const {
825  float object_points_data[3] = {(float)pw.x, (float)pw.y, (float)pw.z};
826  float image_points_data[2] = {0};
827  CvMat object_points = cvMat(1, 1, CV_32FC3, object_points_data);
828  CvMat image_points = cvMat(1, 1, CV_32FC2, image_points_data);
829  ProjectPoints(&object_points, pose, &image_points);
830  pi.x = image_points.data.fl[0];
831  pi.y = image_points.data.fl[1];
832 }
833 
834 Homography::Homography() {
835  cvInitMatHeader(&H, 3, 3, CV_64F, H_data);
836 }
837 
838 void Homography::Find(const vector<PointDouble >& pw, const vector<PointDouble >& pi)
839 {
840  assert(pw.size() == pi.size());
841  int size = (int)pi.size();
842 
843  CvPoint2D64f *srcp = new CvPoint2D64f[size];
844  CvPoint2D64f *dstp = new CvPoint2D64f[size];
845 
846  for(int i = 0; i < size; ++i){
847  srcp[i].x = pw[i].x;
848  srcp[i].y = pw[i].y;
849 
850  dstp[i].x = pi[i].x;
851  dstp[i].y = pi[i].y;
852  }
853 
854  CvMat src_pts, dst_pts;
855  cvInitMatHeader(&dst_pts, 1, size, CV_64FC2, dstp);
856  cvInitMatHeader(&src_pts, 1, size, CV_64FC2, srcp);
857 
858 #ifdef OPENCV11
859  cvFindHomography(&src_pts, &dst_pts, &H, 0, 0, 0);
860 #else
861  cvFindHomography(&src_pts, &dst_pts, &H);
862 #endif
863 
864  delete[] srcp;
865  delete[] dstp;
866 }
867 
868 void Homography::ProjectPoints(const vector<PointDouble>& from, vector<PointDouble>& to)
869 {
870  int size = (int)from.size();
871 
872  CvPoint3D64f *srcp = new CvPoint3D64f[size];
873 
874  for(int i = 0; i < size; ++i){
875  srcp[i].x = from[i].x;
876  srcp[i].y = from[i].y;
877  srcp[i].z = 1;
878  }
879 
880  CvPoint3D64f *dstp = new CvPoint3D64f[size];
881 
882  CvMat src_pts, dst_pts;
883  cvInitMatHeader(&src_pts, 1, size, CV_64FC3, srcp);
884  cvInitMatHeader(&dst_pts, 1, size, CV_64FC3, dstp);
885 
886  cvTransform(&src_pts, &dst_pts, &H);
887 
888  to.clear();
889  for(int i = 0; i < size; ++i)
890  {
891  PointDouble pt;
892  pt.x = dstp[i].x / dstp[i].z;
893  pt.y = dstp[i].y / dstp[i].z;
894 
895  to.push_back(pt);
896  }
897 
898  delete[] srcp;
899  delete[] dstp;
900 }
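// Usage sketch: Find() estimates the plane-to-image homography from matched point
// sets (at least four), and ProjectPoints() maps further plane points through it,
// e.g. to overlay a grid on a detected marker. pw/pi are as in the pose example.
/*
    alvar::Homography hom;
    hom.Find(pw, pi);
    std::vector<alvar::PointDouble> grid, grid_img;
    // ... fill 'grid' with points on the model plane ...
    hom.ProjectPoints(grid, grid_img);
*/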
901 
902 } // namespace alvar