ba_ex_cal.cpp
#include <stdlib.h>
#include <ostream>
#include <stdio.h>
#include <math.h> // sqrt(), atan2(), M_PI
#include <fstream>
#include <yaml-cpp/yaml.h>
#include <vector>
#include "ceres/ceres.h"
#include "ceres/rotation.h"
#include <iostream>
#include <ros/ros.h>
#include <industrial_extrinsic_cal/basic_types.h>
#include <boost/foreach.hpp>
#include <boost/random/normal_distribution.hpp>
#include <boost/random/linear_congruential.hpp>
#include <boost/random/uniform_int.hpp>
#include <boost/random/uniform_real.hpp>
#include <boost/random/variate_generator.hpp>
#include <boost/generator_iterator.hpp>

using std::ifstream;
using std::string;
using std::vector;
using industrial_extrinsic_cal::Point3d;
using industrial_extrinsic_cal::Pose6d;
using industrial_extrinsic_cal::P_BLOCK;

typedef boost::minstd_rand base_gen_type;
base_gen_type gen(42);
boost::normal_distribution<> normal_dist(0, 1); // zero mean, unit variance
boost::variate_generator<base_gen_type&, boost::normal_distribution<> > randn(gen, normal_dist);
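// randn() draws from N(0,1); scaling by sigma gives N(0, sigma^2). This is
// how it is used throughout this file, e.g.
//   x += sigma * randn(); // perturb x with zero-mean Gaussian noise of std dev sigma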

typedef struct
{
  double x; // image x
  double y; // image y
} Observation;


typedef struct
{
  union
  {
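    // The two anonymous structs below alias the same storage: PB_extrinsics
    // overlays {angle_axis, position}, and PB_intrinsics overlays
    // {focal_length_x, focal_length_y, center_x, center_y}.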
    struct
    {
      double PB_extrinsics[6]; // parameter block for extrinsics
      double PB_intrinsics[4]; // parameter block for intrinsics
    };
    struct
    {
      double angle_axis[3];     // angle axis data
      double position[3];       // position data
      double focal_length_x;    // focal length x
      double focal_length_y;    // focal length y
      double center_x;          // center x
      double center_y;          // center y
    };
  };
  string camera_name;
  int height;
  int width;
  vector<Pose6d> pose_history;
  double pose_position_sigma;
  double pose_orientation_sigma;
  int num_observations;
} Camera;

class ObservationDataPoint{
public:
  ObservationDataPoint(Camera* camera, int point_id, P_BLOCK p_position, Observation obs)
  {
    camera_ = camera;
    point_id_ = point_id;
    point_position_ = p_position;
    image_loc_ = obs;
  }

  ~ObservationDataPoint()
  {
  }

  Camera* camera_;
  int point_id_;
  P_BLOCK camera_extrinsics_;
  P_BLOCK point_position_;
  Observation image_loc_;
};
// end of class ObservationDataPoint


// local prototypes
void print_QTasH(double qx, double qy, double qz, double qw, double tx, double ty, double tz);
void print_AATasH(double x, double y, double z, double tx, double ty, double tz);
void print_AATasHI(double x, double y, double z, double tx, double ty, double tz);
void print_AAasEuler(double x, double y, double z);
void print_camera(Camera C, string words);

Observation project_point_no_distortion(Camera C, Point3d P);
void compute_observations(vector<Camera> &cameras,
                          vector<Point3d> &points,
                          double noise,
                          double max_dist,
                          vector<ObservationDataPoint> &observations);
void perturb_cameras(vector<Camera> &cameras, double position_noise, double degrees_noise);
void copy_cameras_wo_history(vector<Camera> &original_cameras, vector<Camera> &cameras);
void copy_points(vector<Point3d> &original_points, vector<Point3d> &points);
void add_pose_to_history(vector<Camera> &cameras, vector<Camera> &original_cameras);
void compute_historic_pose_statistics(vector<Camera> &cameras);
void parse_points(ifstream &points_input_file, vector<Point3d> &original_points);
void parse_cameras(ifstream &cameras_input_file, vector<Camera> &original_cameras);
void compare_cameras(vector<Camera> &C1, vector<Camera> &C2);
void compare_observations(vector<ObservationDataPoint> &O1, vector<ObservationDataPoint> &O2);

// computes image of point in camera's image plane
Observation project_point_no_distortion(Camera C, Point3d P)
{
  double p[3];                  // point rotated into camera frame
  double point[3];              // world location of point
  double aa[3];                 // angle axis representation of camera transform
  double tx = C.position[0];    // location of origin in camera frame x
  double ty = C.position[1];    // location of origin in camera frame y
  double tz = C.position[2];    // location of origin in camera frame z
  double fx = C.focal_length_x; // focal length x
  double fy = C.focal_length_y; // focal length y
  double cx = C.center_x;       // optical center x
  double cy = C.center_y;       // optical center y

  aa[0] = C.angle_axis[0];
  aa[1] = C.angle_axis[1];
  aa[2] = C.angle_axis[2];
  point[0] = P.x;
  point[1] = P.y;
  point[2] = P.z;

  // rotate the point into the camera frame
  ceres::AngleAxisRotatePoint(aa, point, p);

  // apply camera translation
  double xp1 = p[0] + tx;
  double yp1 = p[1] + ty;
  double zp1 = p[2] + tz;

  // scale into the image plane by distance away from camera
  double xp = xp1 / zp1;
  double yp = yp1 / zp1;

  // perform projection using focal length and camera center into image plane
  Observation O;
  O.x = fx * xp + cx;
  O.y = fy * yp + cy;
  return (O);
}
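
// Worked example (illustrative values, not taken from the input files): for a
// camera at the origin with identity rotation, fx = fy = 500, cx = 320,
// cy = 240, the point P = (0.1, 0.2, 2.0) projects to
//   xp = 0.1/2.0 = 0.05,  yp = 0.2/2.0 = 0.1
//   O.x = 500*0.05 + 320 = 345,  O.y = 500*0.1 + 240 = 290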

struct CameraReprjErrorNoDistortion
{
  CameraReprjErrorNoDistortion(double ob_x, double ob_y, double fx, double fy, double cx, double cy) :
    ox_(ob_x), oy_(ob_y), fx_(fx), fy_(fy), cx_(cx), cy_(cy)
  {
  }

  template<typename T>
  bool operator()(const T* const c_p1, // extrinsic parameters [angle axis, translation]
                  const T* point,      // point being projected
                  T* resid) const
  {
    // extract the camera pose from the extrinsics parameter block
    int q = 0;
    const T& x = c_p1[q++];
    const T& y = c_p1[q++];
    const T& z = c_p1[q++];
    const T& tx = c_p1[q++];
    const T& ty = c_p1[q++];
    const T& tz = c_p1[q++];

    // rotate the point into the camera frame
    T aa[3];
    T p[3];
    aa[0] = x;
    aa[1] = y;
    aa[2] = z;
    ceres::AngleAxisRotatePoint(aa, point, p);

    // apply camera translation
    T xp1 = p[0] + tx;
    T yp1 = p[1] + ty;
    T zp1 = p[2] + tz;

    // scale into the image plane by distance away from camera
    T xp = xp1 / zp1;
    T yp = yp1 / zp1;

    // residual is projected location minus observed location
    resid[0] = T(fx_) * xp + T(cx_) - T(ox_);
    resid[1] = T(fy_) * yp + T(cy_) - T(oy_);

    return true;
  }

  // factory to hide the construction of the CostFunction object
  static ceres::CostFunction* Create(const double o_x, const double o_y,
                                     const double fx, const double fy,
                                     const double cx, const double cy)
  {
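    // auto-diff dimensions: 2 residuals, 6 extrinsic parameters, 3 point coordinates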
    return (new ceres::AutoDiffCostFunction<CameraReprjErrorNoDistortion, 2, 6, 3>(
        new CameraReprjErrorNoDistortion(o_x, o_y, fx, fy, cx, cy)));
  }
  double ox_; // observed image location x
  double oy_; // observed image location y
  double fx_; // focal length x
  double fy_; // focal length y
  double cx_; // optical center x
  double cy_; // optical center y
};


int main(int argc, char** argv)
{
  vector<Point3d> points;
  vector<Point3d> original_points;
  vector<Camera>  cameras;
  vector<Camera>  original_cameras;
  vector<ObservationDataPoint> original_observations;
  vector<ObservationDataPoint> observations;

  // TODO use a parameter file for these, or make them arguments
  // hard coded constants
  double camera_pos_noise     = 36.0/12.0;   // camera position noise: 36 inches, expressed in feet
  double camera_or_noise      = 3.0;         // camera orientation noise in degrees
  double image_noise          = 0.4;         // image noise in pixels
  double max_detect_distance  = 999999570.0; // distance in feet beyond which a point can't be detected
  int num_test_cases          = 100;

  google::InitGoogleLogging(argv[0]);
  if (argc != 3)
    {
      std::cerr << "usage: BaExCal <3Dpoints_file> <cameras_file>\n";
      return 1;
    }

  ifstream points_input_file(argv[1]);
  if (points_input_file.fail())
    {
      string temp(argv[1]);
      ROS_ERROR_STREAM("ERROR can't open points_input_file:  " << temp.c_str());
      return 1;
    }

  ifstream cameras_input_file(argv[2]);
  if (cameras_input_file.fail())
    {
      string temp(argv[2]);
      ROS_ERROR_STREAM("ERROR can't open cameras_input_file:  " << temp.c_str());
      return 1;
    }

  // read in the point and camera data from yaml files
  parse_points(points_input_file, original_points);
  parse_cameras(cameras_input_file, original_cameras);

  // this sets the nominal number of observations for each camera
  // NOTE: it may vary somewhat when the noise is high
  compute_observations(original_cameras, original_points, 0.0, max_detect_distance, observations);

  // Set up problem 1 to see if camera extrinsics may be recovered with fixed fiducials:
  //   create nominal observations of points using camera parameters
  //   perturb camera positions and orientations
  //   add all observation cost functions to problem
  //   solve
  copy_cameras_wo_history(original_cameras, cameras);
  copy_points(original_points, points);
  compute_observations(cameras, points, 0.0, max_detect_distance, original_observations);
  compute_observations(cameras, points, image_noise, max_detect_distance, observations);
  compare_observations(original_observations, observations); // shows noise is correct
  perturb_cameras(cameras, camera_pos_noise, camera_or_noise);
  compare_cameras(original_cameras, cameras); // shows how far apart they started

  // Create residuals for each observation in the bundle adjustment problem. The
  // parameters for cameras and points are added automatically.
  ceres::Problem problem1;
  BOOST_FOREACH(ObservationDataPoint obs, observations){
    double x  = obs.image_loc_.x;
    double y  = obs.image_loc_.y;
    double fx = obs.camera_->focal_length_x;
    double fy = obs.camera_->focal_length_y;
    double cx = obs.camera_->center_x;
    double cy = obs.camera_->center_y;

    ceres::CostFunction* cost_function = CameraReprjErrorNoDistortion::Create(x, y, fx, fy, cx, cy);

    double *extrinsics     = obs.camera_->PB_extrinsics;
    double *point_position = obs.point_position_;
    problem1.AddResidualBlock(cost_function, NULL, extrinsics, point_position);
    problem1.SetParameterBlockConstant(point_position); // fixed fiducials
  }

  // solve problem
  ceres::Solver::Options options;
  options.linear_solver_type = ceres::DENSE_SCHUR;
  options.minimizer_progress_to_stdout = false;
  options.max_num_iterations = 1000;

  ceres::Solver::Summary summary;
  ceres::Solve(options, &problem1, &summary);
  // std::cout << summary.FullReport() << "\n";

  compare_cameras(original_cameras, cameras); // shows that cameras were recovered

  exit(1); // NOTE: exits here; the code below does not run in this revision
  copy_cameras_wo_history(original_cameras, cameras);
  copy_points(original_points, points);
  compute_observations(cameras, points, image_noise, max_detect_distance, observations);
  perturb_cameras(cameras, camera_pos_noise, camera_or_noise);

  ceres::Problem problem2;
  BOOST_FOREACH(ObservationDataPoint obs, observations){
    double x  = obs.image_loc_.x;
    double y  = obs.image_loc_.y;
    double fx = obs.camera_->focal_length_x;
    double fy = obs.camera_->focal_length_y;
    double cx = obs.camera_->center_x;
    double cy = obs.camera_->center_y;

    ceres::CostFunction* cost_function = CameraReprjErrorNoDistortion::Create(x, y, fx, fy, cx, cy);

    double *extrinsics     = obs.camera_->PB_extrinsics;
    double *point_position = obs.point_position_;
    problem2.AddResidualBlock(cost_function, NULL, extrinsics, point_position);
    problem2.SetParameterBlockConstant(point_position); // fixed fiducials
  }
  ceres::Solve(options, &problem2, &summary);
  std::cout << summary.FullReport() << "\n";


  // clear all pose histories of cameras
  BOOST_FOREACH(Camera &C, original_cameras){
    C.pose_history.clear();
  }

  // compute poses for cameras for a bunch of test cases
  for(int test_case = 0; test_case < num_test_cases; test_case++){
    copy_cameras_wo_history(original_cameras, cameras);
    copy_points(original_points, points);
    compute_observations(cameras, points, image_noise, max_detect_distance, observations);
    perturb_cameras(cameras, camera_pos_noise, camera_or_noise);

    ceres::Problem problem;
    BOOST_FOREACH(ObservationDataPoint obs, observations){
      double x  = obs.image_loc_.x;
      double y  = obs.image_loc_.y;
      double fx = obs.camera_->focal_length_x;
      double fy = obs.camera_->focal_length_y;
      double cx = obs.camera_->center_x;
      double cy = obs.camera_->center_y;

      ceres::CostFunction* cost_function = CameraReprjErrorNoDistortion::Create(x, y, fx, fy, cx, cy);

      double *extrinsics     = obs.camera_->PB_extrinsics;
      double *point_position = obs.point_position_;
      problem.AddResidualBlock(cost_function, NULL, extrinsics, point_position);
      problem.SetParameterBlockConstant(point_position); // fixed fiducials
    }
    ceres::Solve(options, &problem, &summary);

    add_pose_to_history(cameras, original_cameras);
  } // end of test cases
  compute_historic_pose_statistics(original_cameras);

  BOOST_FOREACH(Camera &C, original_cameras){
    printf("%s\t:sigma distance  %lf angular %lf number_observations = %d\n",
           C.camera_name.c_str(),
           C.pose_position_sigma*12.0,        // feet to inches
           C.pose_orientation_sigma*180/M_PI, // radians to degrees
           C.num_observations);
  }
}

// print a quaternion plus position as a homogeneous transform
void print_QTasH(double qx, double qy, double qz, double qw, double tx, double ty, double tz)
{
  double Rs11 = qw * qw + qx * qx - qy * qy - qz * qz;
  double Rs21 = 2.0 * qx * qy + 2.0 * qw * qz;
  double Rs31 = 2.0 * qx * qz - 2.0 * qw * qy;

  double Rs12 = 2.0 * qx * qy - 2.0 * qw * qz;
  double Rs22 = qw * qw - qx * qx + qy * qy - qz * qz;
  double Rs32 = 2.0 * qy * qz + 2.0 * qw * qx;

  double Rs13 = 2.0 * qx * qz + 2.0 * qw * qy;
  double Rs23 = 2.0 * qy * qz - 2.0 * qw * qx;
  double Rs33 = qw * qw - qx * qx - qy * qy + qz * qz;

  printf("%6.3lf %6.3lf %6.3lf %6.3lf\n", Rs11, Rs12, Rs13, tx);
  printf("%6.3lf %6.3lf %6.3lf %6.3lf\n", Rs21, Rs22, Rs23, ty);
  printf("%6.3lf %6.3lf %6.3lf %6.3lf\n", Rs31, Rs32, Rs33, tz);
  printf("%6.3lf %6.3lf %6.3lf %6.3lf\n", 0.0, 0.0, 0.0, 1.0);
}

// angle axis plus translation to homogeneous transform
void print_AATasH(double x, double y, double z, double tx, double ty, double tz)
{
  double R[9];
  double aa[3];
  aa[0] = x;
  aa[1] = y;
  aa[2] = z;
  ceres::AngleAxisToRotationMatrix(aa, R);
  printf("%6.3lf %6.3lf %6.3lf %6.3lf\n", R[0], R[3], R[6], tx);
  printf("%6.3lf %6.3lf %6.3lf %6.3lf\n", R[1], R[4], R[7], ty);
  printf("%6.3lf %6.3lf %6.3lf %6.3lf\n", R[2], R[5], R[8], tz);
  printf("%6.3lf %6.3lf %6.3lf %6.3lf\n", 0.0, 0.0, 0.0, 1.0);
}

// angle axis plus translation to homogeneous transform, inverted
void print_AATasHI(double x, double y, double z, double tx, double ty, double tz)
{
  double R[9];
  double aa[3];
  aa[0] = x;
  aa[1] = y;
  aa[2] = z;
  ceres::AngleAxisToRotationMatrix(aa, R);
  double ix = -(tx * R[0] + ty * R[1] + tz * R[2]);
  double iy = -(tx * R[3] + ty * R[4] + tz * R[5]);
  double iz = -(tx * R[6] + ty * R[7] + tz * R[8]);
  printf("%6.3lf %6.3lf %6.3lf %6.3lf\n", R[0], R[1], R[2], ix);
  printf("%6.3lf %6.3lf %6.3lf %6.3lf\n", R[3], R[4], R[5], iy);
  printf("%6.3lf %6.3lf %6.3lf %6.3lf\n", R[6], R[7], R[8], iz);
  printf("%6.3lf %6.3lf %6.3lf %6.3lf\n", 0.0, 0.0, 0.0, 1.0);
}

void print_AAasEuler(double x, double y, double z)
{
  double R[9];
  double aa[3];
  aa[0] = x;
  aa[1] = y;
  aa[2] = z;
  ceres::AngleAxisToRotationMatrix(aa, R);
  double rx = atan2(R[7], R[8]);
  double ry = atan2(-R[6], sqrt(R[7] * R[7] + R[8] * R[8]));
  double rz = atan2(R[3], R[0]);
  printf("rpy = %8.4f %8.4f %8.4f\n", rx, ry, rz);
}

void print_camera(Camera C, string words)
{
  printf("%s\n", words.c_str());
  printf("Points in Camera frame to points in World frame transform:\n");
  print_AATasHI(C.angle_axis[0], C.angle_axis[1], C.angle_axis[2],
                C.position[0],   C.position[1],   C.position[2]);

  printf("Points in World frame to points in Camera frame transform:\n");
  print_AATasH(C.angle_axis[0], C.angle_axis[1], C.angle_axis[2],
               C.position[0],   C.position[1],   C.position[2]);

  print_AAasEuler(C.angle_axis[0], C.angle_axis[1], C.angle_axis[2]);
  printf("fx = %8.3lf fy = %8.3lf\n", C.focal_length_x, C.focal_length_y);
  printf("cx = %8.3lf cy = %8.3lf\n", C.center_x, C.center_y);
}


void compute_observations(vector<Camera> &cameras,
                          vector<Point3d> &points,
                          double noise,
                          double max_dist,
                          vector<ObservationDataPoint> &observations)
{
  double pnoise = noise/sqrt(1.58085); // magic number found by trial and error
  int n_observations = 0;
  observations.clear();

  // create nominal observations of points using camera parameters
  BOOST_FOREACH(Camera &C, cameras){
    C.num_observations = 0;
    int j = 0;
    BOOST_FOREACH(Point3d &P, points){
      j++;
      // find image of point in camera
      double dx = C.position[0] - P.x;
      double dy = C.position[1] - P.y;
      double dz = C.position[2] - P.z;
      double distance = sqrt(dx*dx + dy*dy + dz*dz);
      if(distance < max_dist){
        Observation obs = project_point_no_distortion(C, P);
        obs.x += pnoise*randn(); // add observation noise
        obs.y += pnoise*randn();
        if(obs.x >= 0 && obs.x < C.width && obs.y >= 0 && obs.y < C.height){
          // save observation
          ObservationDataPoint new_obs(&(C), j, &(P.pb[0]), obs);
          observations.push_back(new_obs);
          C.num_observations++;
          n_observations++;
        } // end if observation within field of view
      } // end if point close enough to camera
    } // end for each fiducial point
  } // end for each camera
} // end compute_observations

void perturb_cameras(vector<Camera> &cameras, double position_noise, double degrees_noise)
{
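  // empirical scale factors (cf. the "magic number" note in compute_observations),
  // apparently tuned so the net 3-axis perturbation matches the requested noise level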
  double pos_noise    = position_noise/sqrt(2.274625);
  double radian_noise = degrees_noise*M_PI/(180.0*sqrt(2.58047));
  BOOST_FOREACH(Camera &C, cameras){
      C.position[0]   += pos_noise*randn();
      C.position[1]   += pos_noise*randn();
      C.position[2]   += pos_noise*randn();
      C.angle_axis[0] += radian_noise*randn();
      C.angle_axis[1] += radian_noise*randn();
      C.angle_axis[2] += radian_noise*randn();
  }
}

void copy_cameras_wo_history(vector<Camera> &original_cameras, vector<Camera> &cameras)
{
  cameras.clear();
  BOOST_FOREACH(Camera &C, original_cameras){
    Camera newC = C;
    newC.pose_history.clear();
    newC.pose_position_sigma = 0;
    newC.pose_orientation_sigma = 0;
    newC.num_observations = 0;
    cameras.push_back(newC);
  }
}

void copy_points(vector<Point3d> &original_points, vector<Point3d> &points)
{
  points.clear();
  BOOST_FOREACH(Point3d &P, original_points){
    points.push_back(P);
  }
}

void add_pose_to_history(vector<Camera> &cameras, vector<Camera> &original_cameras)
{
  if(cameras.size() != original_cameras.size())
    ROS_ERROR_STREAM("numbers of cameras in the two vectors do not match");

  for(int i = 0; i < (int)cameras.size(); i++){
    Pose6d pose;
    pose.x  = cameras[i].position[0];
    pose.y  = cameras[i].position[1];
    pose.z  = cameras[i].position[2];
    pose.ax = cameras[i].angle_axis[0];
    pose.ay = cameras[i].angle_axis[1];
    pose.az = cameras[i].angle_axis[2];
    original_cameras[i].pose_history.push_back(pose);
  }
}

void compute_historic_pose_statistics(vector<Camera> &cameras)
{
  // calculate statistics of test case
  double mean_x, mean_y, mean_z, mean_ax, mean_ay, mean_az;
  double sigma_x, sigma_y, sigma_z, sigma_ax, sigma_ay, sigma_az;
  BOOST_FOREACH(Camera &C, cameras){
    mean_x = mean_y = mean_z = mean_ax = mean_ay = mean_az = 0.0;
    BOOST_FOREACH(Pose6d &p, C.pose_history){
      mean_x  += p.x;
      mean_y  += p.y;
      mean_z  += p.z;
      mean_ax += p.ax;
      mean_ay += p.ay;
      mean_az += p.az;
    }
    int num_poses = (int)C.pose_history.size();
    mean_x  = mean_x/num_poses;
    mean_y  = mean_y/num_poses;
    mean_z  = mean_z/num_poses;
    mean_ax = mean_ax/num_poses;
    mean_ay = mean_ay/num_poses;
    mean_az = mean_az/num_poses;

    sigma_x = sigma_y = sigma_z = sigma_ax = sigma_ay = sigma_az = 0.0;
    BOOST_FOREACH(Pose6d &p, C.pose_history){
      sigma_x  += (mean_x - p.x)*(mean_x - p.x);
      sigma_y  += (mean_y - p.y)*(mean_y - p.y);
      sigma_z  += (mean_z - p.z)*(mean_z - p.z);
      sigma_ax += (mean_ax - p.ax)*(mean_ax - p.ax);
      sigma_ay += (mean_ay - p.ay)*(mean_ay - p.ay);
      sigma_az += (mean_az - p.az)*(mean_az - p.az);
    }
    // NOTE: the divisor here is num_poses + 1; the conventional sample estimator uses num_poses - 1
    sigma_x  = sqrt(sigma_x/(num_poses + 1.0));
    sigma_y  = sqrt(sigma_y/(num_poses + 1.0));
    sigma_z  = sqrt(sigma_z/(num_poses + 1.0));
    sigma_ax = sqrt(sigma_ax/(num_poses + 1.0));
    sigma_ay = sqrt(sigma_ay/(num_poses + 1.0));
    sigma_az = sqrt(sigma_az/(num_poses + 1.0));

    // combine per-axis sigmas into single position/orientation sigmas (root sum of squares)
    double dv = sqrt(sigma_x*sigma_x + sigma_y*sigma_y + sigma_z*sigma_z);
    double av = sqrt(sigma_ax*sigma_ax + sigma_ay*sigma_ay + sigma_az*sigma_az);

    C.pose_position_sigma    = dv;
    C.pose_orientation_sigma = av;
  } // end for each camera
}

void parse_points(ifstream &points_input_file, vector<Point3d> &original_points)
{
  // parse points
  try
    {
      YAML::Parser points_parser(points_input_file);
      YAML::Node points_doc;
      points_parser.GetNextDocument(points_doc);

      // read in all points
      const YAML::Node *points_node = points_doc.FindValue("points");
      for (unsigned int j = 0; j < points_node->size(); j++){
        const YAML::Node *pnt_node = (*points_node)[j].FindValue("pnt");
        vector<float> temp_pnt;
        (*pnt_node) >> temp_pnt;
        Point3d temp_pnt3d;
        temp_pnt3d.x = temp_pnt[0];
        temp_pnt3d.y = temp_pnt[1];
        temp_pnt3d.z = temp_pnt[2];
        original_points.push_back(temp_pnt3d);
      }
    }
  catch (YAML::ParserException& e){
    ROS_ERROR_STREAM("Failed to read points file: " << e.what());
  }
  ROS_INFO_STREAM("Successfully read in " << (int)original_points.size() << " points");
}
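
// For reference, a points file of the form parsed above looks like this
// (field names come from the FindValue() calls; the coordinate values are
// illustrative only, in feet):
//
//   points:
//     - pnt: [0.0, 0.0, 0.0]
//     - pnt: [1.0, 0.0, 0.0]
//     - pnt: [0.0, 1.0, 0.0]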

void parse_cameras(ifstream &cameras_input_file, vector<Camera> &original_cameras)
{
  Camera temp_camera;
  // parse cameras
  try
    {
      YAML::Parser camera_parser(cameras_input_file);
      YAML::Node camera_doc;
      camera_parser.GetNextDocument(camera_doc);

      // read in all static cameras
      if (const YAML::Node *camera_parameters = camera_doc.FindValue("cameras")){
        ROS_INFO_STREAM("Found " << camera_parameters->size() << " cameras");
        for (unsigned int i = 0; i < camera_parameters->size(); i++){
          (*camera_parameters)[i]["camera_name"]    >> temp_camera.camera_name;
          // angle axis fields replaced with a rotation matrix
          //        (*camera_parameters)[i]["angle_axis_ax"]  >> temp_camera.angle_axis[0];
          //        (*camera_parameters)[i]["angle_axis_ay"]  >> temp_camera.angle_axis[1];
          //        (*camera_parameters)[i]["angle_axis_az"]  >> temp_camera.angle_axis[2];
          const YAML::Node *rotation_node = (*camera_parameters)[i].FindValue("rotation");
          if(rotation_node->size() != 9){
            ROS_ERROR_STREAM("Rotation " << i << " has " << (int)rotation_node->size() << " elements, expected 9");
          }
          // read in the transform from world to camera frame;
          // invert it because the camera object transforms world points into the camera's frame
          double R[9], tx, ty, tz, angle_axis[3];
          for(int j = 0; j < 9; j++){
            (*rotation_node)[j] >> R[j];
          }
          (*camera_parameters)[i]["position_x"]     >> tx;
          (*camera_parameters)[i]["position_y"]     >> ty;
          (*camera_parameters)[i]["position_z"]     >> tz;
          // NOTE: the Ceres rotation functions expect R in column-major order, but R is read in
          // row by row, which effectively yields the transpose (inverse) of R. That inverse is
          // the rotation for transforming world points to camera points, but the translation
          // portion is still from camera to world, so it is inverted below.
          double RI[9];
          RI[0] = R[0];  RI[1] = R[3]; RI[2] = R[6];
          RI[3] = R[1];  RI[4] = R[4]; RI[5] = R[7];
          RI[6] = R[2];  RI[7] = R[5]; RI[8] = R[8];
          ceres::RotationMatrixToAngleAxis(R, angle_axis);
          temp_camera.position[0] = -(tx * RI[0] + ty * RI[1] + tz * RI[2]);
          temp_camera.position[1] = -(tx * RI[3] + ty * RI[4] + tz * RI[5]);
          temp_camera.position[2] = -(tx * RI[6] + ty * RI[7] + tz * RI[8]);

          temp_camera.angle_axis[0] = angle_axis[0];
          temp_camera.angle_axis[1] = angle_axis[1];
          temp_camera.angle_axis[2] = angle_axis[2];
          (*camera_parameters)[i]["focal_length_x"] >> temp_camera.focal_length_x;
          (*camera_parameters)[i]["focal_length_y"] >> temp_camera.focal_length_y;
          (*camera_parameters)[i]["center_x"]       >> temp_camera.center_x;
          (*camera_parameters)[i]["center_y"]       >> temp_camera.center_y;
          (*camera_parameters)[i]["width"]          >> temp_camera.width;
          (*camera_parameters)[i]["height"]         >> temp_camera.height;
          temp_camera.num_observations = 0; // start out with none
          temp_camera.pose_history.clear(); // start out with no history
          original_cameras.push_back(temp_camera);
        } // end of for each camera in file
      } // end if there are any cameras in file
    }
  catch (YAML::ParserException& e){
    ROS_ERROR_STREAM("Failed to read in cameras from yaml file: " << e.what());
    ROS_INFO_STREAM("camera name    = " << temp_camera.camera_name.c_str());
    ROS_INFO_STREAM("angle_axis_ax  = " << temp_camera.angle_axis[0]);
    ROS_INFO_STREAM("angle_axis_ay  = " << temp_camera.angle_axis[1]);
    ROS_INFO_STREAM("angle_axis_az  = " << temp_camera.angle_axis[2]);
    ROS_INFO_STREAM("position_x     = " << temp_camera.position[0]);
    ROS_INFO_STREAM("position_y     = " << temp_camera.position[1]);
    ROS_INFO_STREAM("position_z     = " << temp_camera.position[2]);
    ROS_INFO_STREAM("focal_length_x = " << temp_camera.focal_length_x);
    ROS_INFO_STREAM("focal_length_y = " << temp_camera.focal_length_y);
    ROS_INFO_STREAM("center_x       = " << temp_camera.center_x);
    ROS_INFO_STREAM("center_y       = " << temp_camera.center_y);
  }
  ROS_INFO_STREAM("Successfully read in " << (int)original_cameras.size() << " cameras");
}
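
// For reference, a cameras file of the form parsed above looks like this
// (field names come from the accessors in parse_cameras(); values are
// illustrative only; the rotation matrix is listed row by row, positions in
// feet, focal lengths and centers in pixels):
//
//   cameras:
//     - camera_name: camera1
//       rotation: [1.0, 0.0, 0.0,
//                  0.0, 1.0, 0.0,
//                  0.0, 0.0, 1.0]
//       position_x: 0.0
//       position_y: 0.0
//       position_z: 10.0
//       focal_length_x: 500.0
//       focal_length_y: 500.0
//       center_x: 320.0
//       center_y: 240.0
//       width: 640
//       height: 480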

void compare_cameras(vector<Camera> &C1, vector<Camera> &C2)
{
  if(C1.size() != C2.size()){
    ROS_ERROR_STREAM("compare_cameras() camera vectors differ in length");
  }
  double d_x, d_y, d_z, dist;
  double mean_dist = 0;
  double sigma_dist = 0;
  double d_ax, d_ay, d_az, angle;
  double mean_angle = 0;
  double sigma_angle = 0;
  for(int i = 0; i < (int)C1.size(); i++){
    d_x = C1[i].position[0] - C2[i].position[0];
    d_y = C1[i].position[1] - C2[i].position[1];
    d_z = C1[i].position[2] - C2[i].position[2];
    d_ax = C1[i].angle_axis[0] - C2[i].angle_axis[0];
    d_ay = C1[i].angle_axis[1] - C2[i].angle_axis[1];
    d_az = C1[i].angle_axis[2] - C2[i].angle_axis[2];
    dist = sqrt(d_x*d_x + d_y*d_y + d_z*d_z);
    angle = sqrt(d_ax*d_ax + d_ay*d_ay + d_az*d_az);
    mean_dist += dist;
    mean_angle += angle;
  }
  mean_dist = mean_dist/(int)C1.size();
  mean_angle = mean_angle/(int)C1.size();
  for(int i = 0; i < (int)C1.size(); i++){
    d_x = C1[i].position[0] - C2[i].position[0];
    d_y = C1[i].position[1] - C2[i].position[1];
    d_z = C1[i].position[2] - C2[i].position[2];
    d_ax = C1[i].angle_axis[0] - C2[i].angle_axis[0];
    d_ay = C1[i].angle_axis[1] - C2[i].angle_axis[1];
    d_az = C1[i].angle_axis[2] - C2[i].angle_axis[2];
    dist = sqrt(d_x*d_x + d_y*d_y + d_z*d_z);
    angle = sqrt(d_ax*d_ax + d_ay*d_ay + d_az*d_az);
    sigma_dist  += (dist - mean_dist)*(dist - mean_dist);
    sigma_angle += (angle - mean_angle)*(angle - mean_angle);
  }
  sigma_dist = sqrt(sigma_dist/C1.size());
  sigma_angle = sqrt(sigma_angle/C1.size());
  ROS_INFO_STREAM("mean distance between camera poses = " << mean_dist*12 << " inches, sigma = " << sigma_dist*12 << " inches");
  ROS_INFO_STREAM("mean angle between camera poses = " << mean_angle*180/M_PI << " degrees, sigma = " << sigma_angle*180/M_PI << " degrees");
}

void compare_observations(vector<ObservationDataPoint> &O1, vector<ObservationDataPoint> &O2)
{
  if(O1.size() != O2.size()){
    ROS_ERROR_STREAM("compare_observations() vectors differ in length");
  }
  double d_x, d_y, dist;
  double mean_dist = 0.0;
  for(int i = 0; i < (int)O1.size(); i++){
    d_x = O1[i].image_loc_.x - O2[i].image_loc_.x;
    d_y = O1[i].image_loc_.y - O2[i].image_loc_.y;
    dist = sqrt(d_x*d_x + d_y*d_y);
    mean_dist += dist;
  }
  mean_dist = mean_dist/(int)O1.size();
  ROS_INFO_STREAM("mean distance between observations = " << mean_dist << " pixels");
}