frame.cpp
/*********************************************************************
* Software License Agreement (BSD License)
*
*  Copyright (c) 2009, Willow Garage, Inc.
*  All rights reserved.
*
*  Redistribution and use in source and binary forms, with or without
*  modification, are permitted provided that the following conditions
*  are met:
*
*   * Redistributions of source code must retain the above copyright
*     notice, this list of conditions and the following disclaimer.
*   * Redistributions in binary form must reproduce the above
*     copyright notice, this list of conditions and the following
*     disclaimer in the documentation and/or other materials provided
*     with the distribution.
*   * Neither the name of the Willow Garage nor the names of its
*     contributors may be used to endorse or promote products derived
*     from this software without specific prior written permission.
*
*  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
*  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
*  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
*  FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
*  COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
*  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
*  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
*  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
*  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
*  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
*  ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
*  POSSIBILITY OF SUCH DAMAGE.
*********************************************************************/


#include <frame_common/frame.h>
#include <opencv2/nonfree/nonfree.hpp>  // SURF lives in nonfree in OpenCV 2.4

#include <sys/time.h>

using namespace Eigen;
using namespace std;
using namespace pcl;

// return the current wall-clock time in milliseconds, used for timing
// the detector/extractor calls below
static double mstime()
{
  timeval tv;
  gettimeofday(&tv,NULL);
  long long ts = tv.tv_sec;
  ts *= 1000000;
  ts += tv.tv_usec;
  return (double)ts*.001;
}

namespace frame_common
{

  void Frame::setCamParams(const CamParams &c)
  {
    // monocular projection matrix
    cam = c;
    iproj.setZero();
    iproj(0,0) = c.fx;
    iproj(1,1) = c.fy;
    iproj(0,2) = c.cx;
    iproj(1,2) = c.cy;
    iproj(2,2) = 1.0;

    // stereo reprojection matrix: maps homogeneous (u,v,d,1) to (X,Y,Z,W)
    disp_to_cart.setZero();     // clear entries not set below
    disp_to_cart(0,0) = 1/c.fx;
    disp_to_cart(1,1) = 1/c.fy;
    disp_to_cart(0,3) = -c.cx/c.fx;
    disp_to_cart(1,3) = -c.cy/c.fy;
    disp_to_cart(2,3) = 1.0;
    disp_to_cart(3,2) = 1/(c.tx*c.fx);

    // stereo projection matrix: maps homogeneous (X,Y,Z,1) to (u,v,d,1)
    cart_to_disp.setZero();
    cart_to_disp(0,0) = c.fx;
    cart_to_disp(1,1) = c.fy;
    cart_to_disp(0,2) = c.cx;
    cart_to_disp(1,2) = c.cy;
    cart_to_disp(2,3) = c.fx*c.tx;
    cart_to_disp(3,2) = 1.0;

    // a nonzero baseline marks the frame as stereo
    isStereo = (c.tx > 0.0);
  }
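
  /* Example (illustrative sketch, not part of the original source; assumes
     the Frame projection matrices are accessible as below): cart_to_disp
     and disp_to_cart are inverse mappings up to homogeneous scale.  With
     fx = fy = 500, cx = 320, cy = 240, tx = 0.1:

       Vector4d X(0.5, 0.2, 2.0, 1.0);       // point 2 m in front of camera
       Vector4d p = frame.cart_to_disp * X;  // homogeneous (u,v,d,1)
       p /= p(3);                            // u = 445, v = 290, d = 25
       Vector4d Xr = frame.disp_to_cart * p; // homogeneous cartesian point
       Xr /= Xr(3);                          // recovers (0.5, 0.2, 2.0, 1.0)

     Disparity follows d = fx*tx/Z = 500*0.1/2 = 25 pixels. */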


  // project a camera-frame point (X,Y,Z) to image coords (x,y,d);
  // disparity is d = fx*tx/Z
  Eigen::Vector3d Frame::cam2pix(const Eigen::Vector3d &cam_coord) const
  {
    double xl = cam.fx * cam_coord[0] + cam.cx * cam_coord[2];
    double y = cam.fy * cam_coord[1] + cam.cy * cam_coord[2];
    double w = cam_coord[2];
    double xd = cam.fx * cam.tx;
    double rw = 1.0/w;
    double y_rw = y * rw;
    return Eigen::Vector3d(xl*rw, y_rw, xd*rw);
  }

  // back-project image coords (x,y,d) to a camera-frame point (X,Y,Z);
  // note only fx is used for both axes, so fx == fy is assumed
  Eigen::Vector3d Frame::pix2cam(const Eigen::Vector3d &pix_coord) const
  {
    double x = pix_coord[0] - cam.cx;
    double y = pix_coord[1] - cam.cy;
    double z = cam.fx;
    double w = pix_coord[2]/cam.tx;
    return Eigen::Vector3d(x/w, y/w, z/w);
  }
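
  /* Example (illustrative, not part of the original source): cam2pix and
     pix2cam are mutual inverses for points with positive depth.  With
     fx = fy = 500, cx = 320, cy = 240, tx = 0.1:

       Eigen::Vector3d X(0.5, 0.2, 2.0);
       Eigen::Vector3d uvd = frame.cam2pix(X);  // (445, 290, 25)
       Eigen::Vector3d Xr = frame.pix2cam(uvd); // recovers (0.5, 0.2, 2.0)
  */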


  // transform kpts by the 6DOF motion of the frame
  void Frame::setTKpts(Eigen::Vector4d trans, Eigen::Quaterniond rot)
  {
    Vector3d tr;
    tr = trans.head<3>();
    // set up 4x4 reprojection from kpt+disp to a homogeneous 3D point
    Matrix4d Q;
    Q << 1.0, 0.0, 0.0, -cam.cx, // NOTE: fy is assumed equal to fx here
         0.0, 1.0, 0.0, -cam.cy,
         0.0, 0.0, 0.0, cam.fx,
         0.0, 0.0, 1.0/cam.tx, 0;
    Matrix<double,3,4> P;
    P << cam.fx, 0.0, cam.cx, 0.0,
         0.0, cam.fx, cam.cy, 0.0,
         0.0, 0.0, 1.0, 0.0;
    // 3D point transform - inverse of frame motion: [R|t]^-1 = [R^T | -R^T t]
    Matrix4d T;
    T.setZero();
    Matrix3d R = rot.toRotationMatrix();
    T.block<3,3>(0,0) = R.transpose();
    T.block<3,1>(0,3) = -R.transpose()*tr;
    T(3,3) = 1.0;

    P = P*T*Q;

    // reproject each keypoint+disparity through the composite transform
    tkpts.resize(kpts.size());
    Vector4d v(0.0,0.0,0.0,1.0);
    for (int i=0; i<(int)kpts.size(); i++)
      {
        Vector3d vk;
        v(0) = kpts[i].pt.x;
        v(1) = kpts[i].pt.y;
        v(2) = disps[i];
        vk = P*v;
        tkpts[i].pt.x = vk(0)/vk(2);
        tkpts[i].pt.y = vk(1)/vk(2);
      }
  }
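
  /* Note (illustrative, not part of the original source): the composite
     P*T*Q above maps (u,v,d,1) in this frame to pixels in the new frame:
     Q lifts a keypoint+disparity to a homogeneous 3D point (assuming
     fx == fy), T applies the inverse frame motion, and P projects back
     into the image.  For the identity motion (trans = 0, rot = identity),
     tkpts[i] therefore reproduces kpts[i] for every valid disparity. */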




  // frame processor

  FrameProc::FrameProc(int v)
  {
    // detector (by default, FAST with threshold <v>, grid-adapted to
    // return at most 1000 keypoints)
    detector = new cv::GridAdaptedFeatureDetector(new cv::FastFeatureDetector(v), 1000);
    // detector = new cv::FastFeatureDetector(v);

    // descriptor (by default, SURF)
    extractor = new cv::SurfDescriptorExtractor;

    // stereo
    ndisp = 64;
    doSparse = false;           // use dense stereo by default
  }

  void FrameProc::setFrameDetector(const cv::Ptr<cv::FeatureDetector>& new_detector)
  {
    detector = new_detector;
  }

  void FrameProc::setFrameDescriptor(const cv::Ptr<cv::DescriptorExtractor>& new_extractor)
  {
    extractor = new_extractor;
  }
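
  /* Usage sketch (illustrative, not part of the original source): any
     OpenCV 2.x detector/extractor pair can be swapped in, e.g. ORB:

       frame_common::FrameProc fp(25);          // FAST threshold 25
       fp.setFrameDetector(new cv::OrbFeatureDetector(500));
       fp.setFrameDescriptor(new cv::OrbDescriptorExtractor);
  */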

  // set up mono frame
  void FrameProc::setMonoFrame(Frame& frame, const cv::Mat &img, const cv::Mat& mask )
  {
    frame.img = img;

    // set keypoints and descriptors
    frame.kpts.clear();
    //double t0 = mstime();
    detector->detect(img, frame.kpts, mask);
    //double t1 = mstime();
    extractor->compute(img, frame.kpts, frame.dtors);
    //double t2 = mstime();

    frame.pts.resize(frame.kpts.size());
    frame.goodPts.assign(frame.kpts.size(), false);
    frame.disps.assign(frame.kpts.size(), 10); // placeholder disparity
  }

  // set up stereo frame
  // assumes frame has camera params already set
  // <nfrac> is nonzero if <imgr> is a dense stereo disparity image
  void FrameProc::setStereoFrame(Frame &frame, const cv::Mat &img, const cv::Mat &imgr, const cv::Mat &left_mask, int nfrac,
                                 bool setPointCloud)
  {
    setMonoFrame( frame, img, left_mask );
    frame.imgRight = imgr;

    // set stereo, optionally with point cloud
    setStereoPoints( frame, nfrac, setPointCloud );
  }

  // set up stereo frame, with no mask on the left image
  // assumes frame has camera params already set
  // <nfrac> is nonzero if <imgr> is a dense stereo disparity image
  void FrameProc::setStereoFrame(Frame &frame, const cv::Mat &img, const cv::Mat &imgr, int nfrac,
                                 bool setPointCloud)
  {
    setStereoFrame( frame, img, imgr, cv::Mat(), nfrac, setPointCloud );
  }
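
  /* Usage sketch (illustrative, not part of the original source; assumes a
     rectified grayscale stereo pair and calibrated camera parameters):

       cv::Mat left  = cv::imread("left.png", 0);
       cv::Mat right = cv::imread("right.png", 0);
       frame_common::Frame frame;
       frame_common::CamParams cp;    // fx, fy, cx, cy, tx filled in
       frame.setCamParams(cp);
       frame_common::FrameProc fp(25);
       fp.setStereoFrame(frame, left, right, 0, false);
       // frame.pts now holds 3D points for keypoints with good disparity
  */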

  // set up stereo points
  void FrameProc::setStereoPoints(Frame &frame, int nfrac, bool setPointCloud)
  {
    //  if (frame.img.rows == 0 || frame.imgRight.rows == 0)
    //    return;
    frame.disps.clear();

    // choose the stereo matcher: sparse, or dense with optional
    // fractional-disparity scaling
    FrameStereo *st;
    if (doSparse)
      st = new SparseStereo(frame.img,frame.imgRight,true,ndisp);
    else if (nfrac > 0)
      {
        DenseStereo::ndisp = ndisp;
        st = new DenseStereo(frame.img,frame.imgRight,ndisp,1.0/(double)nfrac);
      }
    else
      {
        DenseStereo::ndisp = ndisp;
        st = new DenseStereo(frame.img,frame.imgRight,ndisp);
      }

    int nkpts = frame.kpts.size();
    frame.goodPts.resize(nkpts);
    frame.pts.resize(nkpts);
    frame.disps.resize(nkpts);

    // look up a disparity for each keypoint and triangulate the good ones
    #pragma omp parallel for shared( st )
    for (int i=0; i<nkpts; i++)
      {
        double disp = st->lookup_disparity(frame.kpts[i].pt.x,frame.kpts[i].pt.y);
        frame.disps[i] = disp;
        if (disp > 0.0)           // good disparity
          {
            frame.goodPts[i] = true;
            Vector3d pt(frame.kpts[i].pt.x,frame.kpts[i].pt.y,disp);
            frame.pts[i].head(3) = frame.pix2cam(pt);
            frame.pts[i](3) = 1.0;
            //          cout << pts[i].transpose() << endl;
          }
        else
          frame.goodPts[i] = false;
      }

    if (!setPointCloud) return;

    // convert disparities and image to a point cloud with luminance
    double cx = frame.cam.cx;
    double cy = frame.cam.cy;
    double fx = frame.cam.fx;
    double tx = frame.cam.tx;

    // fill in the dense point cloud
    frame.dense_pointcloud.points.resize(0);

    for (int v=0; v<(int)frame.img.rows; v++)
      {
        const uchar *imrow = frame.img.ptr<uchar>(v);
        for (int u=0; u<(int)frame.img.cols; u++, imrow++)
          {
            double disp = st->lookup_disparity(u,v);
            if (disp > 0)      // valid point
              {
                pcl::PointXYZRGB pt;
                // back-project (u,v,disp) to x,y,z
                double w = tx / disp;
                pt.x = ((double)u - cx)*w;
                pt.y = ((double)v - cy)*w;
                pt.z = fx*w;

                // PCL packs RGB into the float 'rgb' field; replicate the
                // grayscale value into all three channels
                uint8_t g = *imrow;
                int32_t rgb = (g << 16) | (g << 8) | g;
                pt.rgb = *(float*)(&rgb);
                frame.dense_pointcloud.points.push_back(pt);
              }
          }
      }

    delete st;
  }
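
  /* Note (illustrative, not part of the original source): the dense loop
     above is the scalar form of pix2cam().  With w = tx/disp,

       x = (u - cx)*w,   y = (v - cy)*w,   z = fx*w = fx*tx/disp,

     so a 25-pixel disparity at fx = 500, tx = 0.1 gives z = 2 m. */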


  void PointcloudProc::setPointcloud(Frame &frame, const pcl::PointCloud<pcl::PointXYZRGB>& input_cloud) const
  {
    reduceCloud(input_cloud, frame.pointcloud);

    // For now, keep a 1-1 mapping between pl_pts, pl_kpts, pl_normals,
    // and pl_ipts, replicating the info in the pointcloud.
    // TODO: Do something more intelligent than this.
    frame.pl_pts.clear();
    frame.pl_kpts.clear();
    frame.pl_normals.clear();
    frame.pl_ipts.clear();

    unsigned int ptcloudsize = frame.pointcloud.points.size();
    frame.pl_pts.resize(ptcloudsize);
    frame.pl_kpts.resize(ptcloudsize);
    frame.pl_normals.resize(ptcloudsize);
    frame.pl_ipts.resize(ptcloudsize);

    #pragma omp parallel for
    for (int i=0; i < (int)frame.pointcloud.points.size(); i++)
    {
      PointXYZRGBNormal &pt = frame.pointcloud.points[i];

      frame.pl_pts[i] = Eigen::Vector4d(pt.x, pt.y, pt.z, 1.0);
      frame.pl_normals[i] = Eigen::Vector4d(pt.normal[0], pt.normal[1], pt.normal[2], 1.0);
      frame.pl_kpts[i] = projectPoint(frame.pl_pts[i], frame.cam);
      frame.pl_ipts[i] = -1;    // not yet associated with an image point
    }
  }
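
  /* Usage sketch (illustrative, not part of the original source):

       frame_common::PointcloudProc pp;
       pp.setPointcloud(frame, raw_cloud);  // reduces the cloud and fills
                                            // pl_pts/pl_normals/pl_kpts
  */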

  void PointcloudProc::match(const Frame& frame0, const Frame& frame1,
        const Eigen::Vector3d& trans, const Eigen::Quaterniond& rot,
        std::vector<cv::DMatch>& matches) const
  {
    PointCloud<PointXYZRGBNormal> transformed_cloud;

    // First, transform frame0's cloud into frame1's coordinates:
    // rotate by the conjugate, then translate by the negated offset.
    transformPointCloudWithNormals<PointXYZRGBNormal>(frame0.pointcloud, transformed_cloud, Vector3f(0,0,0), rot.cast<float>().conjugate());
    transformPointCloudWithNormals<PointXYZRGBNormal>(transformed_cloud, transformed_cloud, -trans.cast<float>(), Quaternionf(1, 0, 0, 0));
    //pcl::io::savePCDFileASCII ("cloud0.pcd", transformed_cloud);
    //pcl::io::savePCDFileASCII ("cloud1.pcd", frame1.pointcloud);

    // Optional/TODO: Perform ICP to further refine the estimate.
    /*PointCloud<PointXYZRGBNormal> cloud_reg;

    IterativeClosestPointNonLinear<PointXYZRGBNormal, PointXYZRGBNormal> reg;
    reg.setInputCloud (boost::make_shared<const PointCloud<PointXYZRGBNormal> > (transformed_cloud));
    reg.setInputTarget (boost::make_shared<const PointCloud<PointXYZRGBNormal> > (frame1.pointcloud));
    reg.setMaximumIterations(50);
    reg.setTransformationEpsilon (1e-8);

    reg.align(cloud_reg); */

    // Find mutual nearest-neighbor matches between the two clouds.
    // (TODO: also compare normals.)
    std::vector<int> f0_indices, f1_indices;
    getMatchingIndices(transformed_cloud, frame1.pointcloud, f0_indices, f1_indices);

    // Convert matches into cv::DMatch format, filtering by distance and
    // normal agreement.  Keypoints and projections of the relevant
    // features are currently filled in when setting the pointcloud.
    matches.clear();
    // Start at i=1 as a hack to reject (0,0,0) matches
    // (why are these in the pointcloud?)

    #pragma omp parallel for shared( transformed_cloud, f0_indices, f1_indices )
    for (int i=1; i < (int)f0_indices.size(); i++)
    {
      const PointXYZRGBNormal &pt0 = transformed_cloud.points[f0_indices[i]];
      const PointXYZRGBNormal &pt1 = frame1.pointcloud.points[f1_indices[i]];

      // Compare normals and Euclidean distance of the matched points
      Quaterniond normalquat;
      Vector3d norm0(pt0.normal[0], pt0.normal[1], pt0.normal[2]), norm1(pt1.normal[0], pt1.normal[1], pt1.normal[2]);
      normalquat.setFromTwoVectors(norm0, norm1);
      //double angledist = normalquat.angularDistance(normalquat.Identity());
      double dist = (Vector3d(pt0.x, pt0.y, pt0.z)-Vector3d(pt1.x, pt1.y, pt1.z)).norm();

      /* Vector4d p0_pt = Vector4d(pt0.x, pt0.y, pt0.z, 1.0);
      Vector3d expected_proj = projectPoint(p0_pt, frame0.cam);

      Vector3d diff = expected_proj - frame1.pl_kpts[f1_indices[i]].head<3>();
      diff(2) = diff(2) - diff(0);

      printf("[Proj difference] %f %f %f\n", diff(0), diff(1), diff(2)); */

      if ((norm0 - norm1).norm() < 0.5 && dist < 0.2)
        #pragma omp critical
        matches.push_back(cv::DMatch(f0_indices[i], f1_indices[i], dist));
    }

    printf("[Frame] Found %d candidate matches, kept %d after filtering.\n", (int)f0_indices.size(), (int)matches.size());
  }

  void PointcloudProc::getMatchingIndices(const PointCloud<PointXYZRGBNormal>& input,
            const PointCloud<PointXYZRGBNormal>& output,
            std::vector<int>& input_indices, std::vector<int>& output_indices) const
  {
    // TODO: Don't calculate the KdTree each time.
    KdTreeFLANN<PointXYZRGBNormal> input_tree, output_tree;

    input_tree.setInputCloud(boost::make_shared<const PointCloud<PointXYZRGBNormal> >(input));
    output_tree.setInputCloud(boost::make_shared<const PointCloud<PointXYZRGBNormal> >(output));

    // For each input point, find its nearest neighbor in the output tree,
    // then check that the input point is that neighbor's nearest neighbor
    // in turn (mutual nearest neighbors).
    #pragma omp parallel for shared( input_tree, output_tree )
    for (int i = 0; i < (int)input.points.size(); i++)
    {
      PointXYZRGBNormal input_pt = input.points[i];
      std::vector<int> input_indexvect(1), output_indexvect(1); // 1-NN searches
      std::vector<float> input_distvect(1), output_distvect(1);

      // Find the nearest neighbor of the input point in the output tree.
      output_tree.nearestKSearch(input_pt, 1, input_indexvect, input_distvect);

      PointXYZRGBNormal output_pt = output.points[input_indexvect[0]];

      // Find the nearest neighbor of that output point in the input tree.
      input_tree.nearestKSearch(output_pt, 1, output_indexvect, output_distvect);

      // If they point back at each other, record the match.
      #pragma omp critical
      if (output_indexvect[0] == i)
      {
        input_indices.push_back(i);
        output_indices.push_back(input_indexvect[0]);
      }
    }
  }
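
  /* Note (illustrative, not part of the original source): mutual
     nearest-neighbor matching keeps a pair (A,B) only when A's nearest
     neighbor in the output cloud is B and B's nearest neighbor in the
     input cloud is again A; asymmetric pairs, typical near occlusion
     boundaries and in non-overlapping regions, are discarded. */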

  // Subsample cloud for faster matching and processing, while filling in normals.
  void PointcloudProc::reduceCloud(const PointCloud<PointXYZRGB>& input, PointCloud<PointXYZRGBNormal>& output) const
  {
    PointCloud<PointXYZRGB> cloud_nan_filtered, cloud_box_filtered, cloud_voxel_reduced;
    PointCloud<Normal> normals;
    PointCloud<PointXYZRGBNormal> cloud_normals;

    std::vector<int> indices;

    // Filter out NaNs.
    removeNaNFromPointCloud(input, cloud_nan_filtered, indices);
    indices.clear();

    // Filter out everything outside a 200x200x200 m box centered on the origin.
    Eigen::Vector4f min_pt(-100, -100, -100, -100);
    Eigen::Vector4f max_pt(100, 100, 100, 100);
    getPointsInBox(cloud_nan_filtered, min_pt, max_pt, indices);

    ExtractIndices<PointXYZRGB> boxfilter;
    boxfilter.setInputCloud(boost::make_shared<const PointCloud<PointXYZRGB> >(cloud_nan_filtered));
    boxfilter.setIndices (boost::make_shared<vector<int> > (indices));
    boxfilter.filter(cloud_box_filtered);

    // Reduce the pointcloud to a 5 cm voxel grid.
    VoxelGrid<PointXYZRGB> voxelfilter;
    voxelfilter.setInputCloud (boost::make_shared<const PointCloud<PointXYZRGB> > (cloud_box_filtered));
    voxelfilter.setLeafSize (0.05, 0.05, 0.05);
    //      voxelfilter.setLeafSize (0.1, 0.1, 0.1);
    voxelfilter.filter (cloud_voxel_reduced);

    // Compute normals with a 0.25 m search radius.
    NormalEstimation<PointXYZRGB, Normal> normalest;
    normalest.setViewPoint(0, 0, 0);
    normalest.setSearchMethod (boost::make_shared<search::KdTree<PointXYZRGB> > ());
    //normalest.setKSearch (10);
    normalest.setRadiusSearch (0.25);
    //      normalest.setRadiusSearch (0.4);
    normalest.setInputCloud(boost::make_shared<const PointCloud<PointXYZRGB> >(cloud_voxel_reduced));
    normalest.compute(normals);

    pcl::concatenateFields (cloud_voxel_reduced, normals, cloud_normals);

    // Filter out high-curvature points.
    PassThrough<PointXYZRGBNormal> normalfilter;
    normalfilter.setFilterFieldName("curvature");
    normalfilter.setFilterLimits(0.0, 0.2);
    normalfilter.setInputCloud(boost::make_shared<const PointCloud<PointXYZRGBNormal> >(cloud_normals));
    normalfilter.filter(output);
  }

  // Project a 3D point into (u, v, u'), where u' is the column of the
  // point in the right stereo image (u' = u - fx*tx/z).
  Eigen::Vector3d PointcloudProc::projectPoint(Eigen::Vector4d& point, CamParams cam) const
  {
    Eigen::Vector3d keypoint;

    keypoint(0) = (cam.fx*point.x()) / point.z() + cam.cx;
    keypoint(1) = (cam.fy*point.y()) / point.z() + cam.cy;
    keypoint(2) = (cam.fx*(point.x()-cam.tx)/point.z() + cam.cx);

    return keypoint;
  }
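
  /* Worked example (illustrative, not part of the original source): with
     fx = fy = 500, cx = 320, cy = 240, tx = 0.1 and point (0.5, 0.2, 2.0, 1.0):

       u  = 500*0.5/2 + 320 = 445
       v  = 500*0.2/2 + 240 = 290
       u' = 500*(0.5 - 0.1)/2 + 320 = 420   (disparity d = u - u' = 25)
  */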

} // end namespace frame_common

  // Detect keypoints
  // this returns no keypoints on my sample images...
  // boost::shared_ptr<f2d::FeatureDetector> detector( new f2d::StarFeatureDetector(16, 100) );
  //  boost::shared_ptr<f2d::FeatureDetector> detector( new f2d::StarFeatureDetector );
  //  boost::shared_ptr<f2d::FeatureDetector> detector( new f2d::SurfFeatureDetector(4000.0) );
  //  boost::shared_ptr<f2d::FeatureDetector> detector( new f2d::HarrisFeatureDetector(300, 5) );
  //  boost::shared_ptr<f2d::FeatureDetector> detector( new f2d::FastFeatureDetector(50) );

  // Compute descriptors
  //  boost::shared_ptr<f2d::DescriptorExtractor> extractor(cd); // this is the calonder descriptor
  //  boost::shared_ptr<f2d::DescriptorExtractor> extractor( new f2d::SurfDescriptorExtractor(3, 4, true) );

