SurfPlanarDetector.cpp
00001 
00033 #include "SurfPlanarDetector.h"
00034 #include "PlanarVisualizationModel.h"
00035 #include "ObjectModel.h"
00036 #include "ObjectDetectorMethod.h"
00037 #include "re_msgs/DetectedObject.h"
00038 
00039 #include "DUtils.h"
00040 #include "DUtilsCV.h"
00041 #include "DVision.h"
00042 
00043 #include "debug.h"
00044 
00045 #include <vector>
00046 #include <iomanip>
00047 
00048 typedef DVision::PMVS::PLYFile::PLYPoint PLYPoint;
00049 typedef DVision::SurfSet SurfSet;
00050 using namespace std;
00051 
00052 // ---------------------------------------------------------------------------
00053 
00058 
00059 #ifdef IF_DEBUG_MODE
00060   #undef IF_DEBUG_MODE
00061 #endif
00062 
00063 #if PLANAR_DEBUG_ENABLED
00064   #define IF_DEBUG_MODE(X) if(debugMode()){ X; }
00065   #define IF_DEBUG_OR_VISUALIZATION_MODE(X) \
00066     if(debugMode() || visualizationMode()){ X; }
00067 #else
00068   #define IF_DEBUG_MODE(X) if(0){}
00069   // defined as an empty if(0) block so that a trailing "else" remains valid
00070   #define IF_DEBUG_OR_VISUALIZATION_MODE(X) \
00071     if(visualizationMode()){ X; }
00072 #endif
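// Editorial note (not part of the original file): with PLANAR_DEBUG_ENABLED set,
// a block such as
//   IF_DEBUG_MODE
//   (
//     cv::imwrite(m_debug_prefix + "debug.png", img);
//   )
// expands to "if(debugMode()){ cv::imwrite(...); }", so it runs only in debug
// mode; with the flag unset, the wrapped statements are discarded by the
// preprocessor.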
00073 
00074 // ---------------------------------------------------------------------------
00075 
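// Editorial summary (derived from the code below, not part of the original
// file): detect() extracts SURF features from the scene image when they are
// not already provided, asks the model for candidate faces whose features
// match the scene, and tries to verify each candidate with a RANSAC homography
// in detectWithHomography(); if no candidate is verified, the point lists of
// the outgoing detection message are cleared.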
00076 void SurfPlanarDetector::detect(ObjectDetectorMethod::DetectionData &data,
00077   ObjectModel &model,
00078   re_msgs::DetectedObject &detection)
00079 {
00080 
00081   #if PLANAR_DEBUG_ENABLED
00082     // set the prefix used for the debug output files of this detection
00083     {
00084       stringstream ss;
00085       ss << m_debug_dir << "/"
00086         << setw(2) << setfill('0') << m_detect_counter << "_planar_";
00087       m_debug_prefix = ss.str();
00088     }
00089     std::string original_debug_prefix = m_debug_prefix;
00090     m_detect_counter++;
00091   #endif
00092 
00093   if(!data.surfdata.provided)
00094   {
00095     // extract SURF keypoints and descriptors from the scene image
00096     data.surfdata.surfset.Extract(data.img, 800, false);
00097     data.surfdata.provided = true;
00098   }
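  // (About the Extract call above: the numeric argument is presumably the SURF
  // hessian threshold and the boolean selects the extended 128-float descriptor
  // variant; this is an assumption, not checked against the DVision headers.)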
00099 
00100   IF_DEBUG_OR_VISUALIZATION_MODE
00101   (
00102     cv::Mat img = data.img.clone();
00103     DUtilsCV::Drawing::drawKeyPoints(img, data.surfdata.surfset.keys);
00104 
00105     IF_DEBUG_MODE
00106     (
00107       std::string filename = m_debug_prefix + "0_scene_keypoints.png";
00108       cv::imwrite(filename, img);
00109     )
00110 
00111     if(visualizationMode()) visualize(img, "Searching...");
00112   )
00113 
00114   const SurfSet& scene_surfset = data.surfdata.surfset;
00115   vector<int> face_indices;
00116   vector<vector<int> > model_indices;
00117   vector<vector<int> > scene_indices;
00118   vector<vector<float> > distances;
00119 
00120   model.detectFaces(scene_surfset, face_indices, model_indices,
00121     scene_indices, distances, 1, 1, 0.6);
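  // The outputs of detectFaces above are parallel vectors: face_indices[i]
  // indexes model.Faces, while model_indices[i], scene_indices[i] and
  // distances[i] hold the putative SURF correspondences between that face and
  // the scene (this reading follows from how they are used below).  The
  // trailing numeric arguments are matching parameters of ObjectModel; the
  // last one, 0.6, looks like a nearest-neighbour distance ratio (assumption).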
00122 
00123   bool found = false;
00124   if(!face_indices.empty())
00125   {
00126     for(unsigned int i = 0; i < face_indices.size(); ++i)
00127     {
00128       int idx = face_indices[i];
00129 
00130       IF_DEBUG_MODE
00131       (
00132         stringstream ss;
00133         ss << original_debug_prefix << "1_it_" << i << "_face_" << idx << "_";
00134         m_debug_prefix = ss.str();
00135         std::string filename = m_debug_prefix + "0_match.png";
00136 
00137         DUtilsCV::Drawing::saveCorrespondenceImage(filename, data.img,
00138           model.Faces[idx].image, scene_surfset.keys,
00139           model.Faces[idx].surf.keys,
00140           scene_indices[i], model_indices[i]);
00141       )
00142 
00143       found = detectWithHomography(scene_surfset, model, model.Faces[idx],
00144         model_indices[i], scene_indices[i], distances[i],
00145         data, 3., false, detection);
00146 
00147       if(found) break;
00148     }
00149   }
00150 
00151   if(!found)
00152   {
00153     detection.points2d.clear();
00154     detection.points3d.clear();
00155     detection.points3d_model.clear();
00156   }
00157 }
00158 
00159 // ---------------------------------------------------------------------------
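// alignedPoints() returns true when the 3D model points selected by 'indices'
// are (nearly) collinear: it compares the two largest diagonal entries of the
// covariance matrix of the points, and a very small ratio means almost all of
// the variance lies along a single coordinate axis, so the points cannot
// support a reliable homography/pose estimate.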
00189 bool SurfPlanarDetector::alignedPoints(const ObjectModel::Face &face, 
00190     const std::vector<int> indices) const
00191 {
00192   if(indices.size() < 3) return true;
00193   
00195   const double sq_threshold = 0.25 * 0.25;  // squared ratio threshold on the axis variances
00197   
00198   cv::Mat P(indices.size(), 3, CV_32F);
00199   float *p = P.ptr<float>();
00200   for(unsigned int i = 0; i < indices.size(); ++i, p += 3)
00201   {
00202     const PLYPoint &ply = face.plypoints[indices[i]];
00203     
00204     p[0] = ply.x;
00205     p[1] = ply.y;
00206     p[2] = ply.z;
00207   }
00208   
00209   cv::Mat Q, mean;
00210   cv::calcCovarMatrix(P, Q, mean, CV_COVAR_ROWS | CV_COVAR_NORMAL);
00211   
00212   double d[3] = {
00213     Q.at<double>(0,0), Q.at<double>(1,1), Q.at<double>(2,2) };
00214   
00215   sort(d, d+3); // ascending
00216   // points are aligned (collinear) when the second-largest axis variance is
00217   // negligible compared to the largest one
00218   return ((d[1] / d[2]) < sq_threshold);
00219 }
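// --- Illustrative sketch (editorial addition, not part of the original file) --
// A small standalone program demonstrating the collinearity test above on
// synthetic points.  It assumes the same OpenCV 2.x API and CV_COVAR_* flags
// used by this file; 'nearlyCollinear' is a hypothetical helper mirroring
// alignedPoints().
#include <opencv2/core/core.hpp>
#include <algorithm>
#include <cstdio>

static bool nearlyCollinear(const cv::Mat &P)    // P is Nx3, CV_32F
{
  const double sq_threshold = 0.25 * 0.25;       // same value as alignedPoints
  cv::Mat Q, mean;
  cv::calcCovarMatrix(P, Q, mean, CV_COVAR_ROWS | CV_COVAR_NORMAL);
  double d[3] = { Q.at<double>(0,0), Q.at<double>(1,1), Q.at<double>(2,2) };
  std::sort(d, d + 3);                           // ascending
  return (d[1] / d[2]) < sq_threshold;
}

int main()
{
  float line_pts[]  = { 0,0,0,  1,0,0,  2,0,0,  3,0,0 };  // points on the X axis
  float plane_pts[] = { 0,0,0,  1,0,0,  0,1,0,  1,1,0 };  // points spread in XY
  cv::Mat line(4, 3, CV_32F, line_pts);
  cv::Mat plane(4, 3, CV_32F, plane_pts);
  std::printf("line  aligned: %d\n", (int)nearlyCollinear(line));   // prints 1
  std::printf("plane aligned: %d\n", (int)nearlyCollinear(plane));  // prints 0
  return 0;
}
// ------------------------------------------------------------------------------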
00220 // ---------------------------------------------------------------------------
00221 
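// Editorial summary (derived from the code below, not part of the original
// file): given the putative correspondences between one model face and the
// scene, detectWithHomography() estimates a homography with RANSAC, keeps the
// inliers, discards inlier sets whose 3D model points are collinear or whose
// estimated pose places the object behind the camera, and otherwise computes
// the pose and fills the DetectedObject message.  When doReChecking is set it
// first retries on the inliers alone with a 1-pixel reprojection error before
// accepting the current inlier set.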
00222 bool SurfPlanarDetector::detectWithHomography(const SurfSet &scene_surfset,
00223   ObjectModel &model, ObjectModel::Face &face,
00224   const std::vector<int> &model_indices,
00225   const std::vector<int> &scene_indices, const std::vector<float> &distances,
00226   const ObjectDetectorMethod::DetectionData &data,
00227   const double maxReprojectionError, bool doReChecking,
00228   re_msgs::DetectedObject &detection)
00229 {
00230   const int MIN_POINTS = 15;
00231   const int N = model_indices.size();
00232 
00233   if(N < MIN_POINTS) return false;
00234 
00235   // arrange the matched keypoint coordinates for homography estimation
00236   cv::Mat model_points(N, 2, CV_32F);
00237   cv::Mat scene_points(N, 2, CV_32F);
00238 
00239   for(int i = 0; i < N; ++i)
00240   {
00241     const cv::KeyPoint &model_kp = face.surf.keys[model_indices[i]];
00242     const cv::KeyPoint &scene_kp = scene_surfset.keys[scene_indices[i]];
00243 
00244     model_points.at<float>(i, 0) = model_kp.pt.x;
00245     model_points.at<float>(i, 1) = model_kp.pt.y;
00246 
00247     scene_points.at<float>(i, 0) = scene_kp.pt.x;
00248     scene_points.at<float>(i, 1) = scene_kp.pt.y;
00249   }
00250 
00251   vector<unsigned char> status;
00252   cv::Mat sHm = cv::findHomography(model_points, scene_points, status,
00253     CV_RANSAC, maxReprojectionError);
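  // 'sHm' maps model-image coordinates into scene coordinates; 'status' is the
  // RANSAC inlier mask, with a non-zero entry for each correspondence that is
  // consistent with the estimated homography within maxReprojectionError pixels.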
00254 
00255   cv::Mat _image_to_show;
00256   IF_DEBUG_OR_VISUALIZATION_MODE
00257   (
00258     vector<int> debug_scene_indices;
00259     vector<int> debug_model_indices;
00260     debug_scene_indices.reserve(status.size());
00261     debug_model_indices.reserve(status.size());
00262 
00263     for(unsigned int i = 0; i < status.size(); ++i)
00264     {
00265       if(status[i] != 0)
00266       {
00267         debug_scene_indices.push_back( scene_indices[i] );
00268         debug_model_indices.push_back( model_indices[i] );
00269       }
00270     }
00271 
00272     DUtilsCV::Drawing::drawCorrespondences(_image_to_show, data.img,
00273       face.image, scene_surfset.keys, face.surf.keys,
00274       debug_scene_indices, debug_model_indices);
00275 
00276     IF_DEBUG_MODE
00277     (
00278       std::string filename;
00279       if(doReChecking)
00280         filename = m_debug_prefix + "1_0_first_homography.png";
00281       else
00282         filename = m_debug_prefix + "1_1_re_homography.png";
00283 
00284       cv::imwrite(filename, _image_to_show);
00285     )
00286   )
00287 
00288   // if a homography was found, collect its inliers and check that there are enough
00289   if(!sHm.empty())
00290   {
00291     vector<int> inlier_model_indices, inlier_scene_indices;
00292     vector<float> inlier_distances;
00293 
00294     inlier_scene_indices.reserve(status.size());
00295     inlier_model_indices.reserve(status.size());
00296     inlier_distances.reserve(status.size());
00297 
00298     for(unsigned int i = 0; i < status.size(); ++i)
00299     {
00300       if(status[i] != 0)
00301       {
00302         inlier_model_indices.push_back( model_indices[i] );
00303         inlier_scene_indices.push_back( scene_indices[i] );
00304         inlier_distances.push_back( distances[i] );
00305       }
00306     }
00307 
00308     if(!alignedPoints(face, inlier_model_indices))
00309     {
00311       int inliers = (int)inlier_model_indices.size();
00312 
00313       // optionally re-estimate the homography on these inliers with a tighter reprojection error
00314       if(doReChecking && inliers >= MIN_POINTS)
00315       {
00316         bool found = detectWithHomography(scene_surfset, model, face,
00317           inlier_model_indices, inlier_scene_indices, inlier_distances,
00318           data, 1, false, detection);
00319         if(found) return true;
00320       }
00321       if(inliers >= MIN_POINTS)
00322       {
00323         // found!
00324         //if(visualizationMode())
00325         //{
00326         //  visualize(_image_to_show);
00327         //}
00328 
00329         // calculate pose
00330         cv::Mat cTo;
00331         calculatePose(face, inlier_model_indices, scene_surfset,
00332           inlier_scene_indices, data.params.camera, cTo);
00333         //assert(cTo.type() == CV_64F);
00334 
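        // cTo is the homogeneous transformation from the object frame to the
        // camera frame; the ratio below is the Z coordinate of the object origin
        // in the camera frame, so a non-positive value means the object would
        // lie behind the camera.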
00335         if(cTo.at<double>(2,3) / cTo.at<double>(3,3) <= 0)
00336         {
00337           ROS_DEBUG("cTo behind the camera, rejecting...");
00338         }
00339         else
00340         {
00341           // obj is in front of the camera
00342 
00343           IF_DEBUG_OR_VISUALIZATION_MODE
00344           (
00345             cv::Mat img;
00346             if(data.img.channels() == 3)
00347               img = data.img.clone();
00348             else
00349               cv::cvtColor(data.img, img, CV_GRAY2BGR);
00350 
00351             model.getVisualizationModel().draw(img, cTo,
00352               data.params.camera.GetIntrinsicParameters());
00353 
00354             IF_DEBUG_MODE
00355             (
00356               string filename = m_debug_prefix + "2_pose.png";
00357               cv::imwrite(filename, img);
00358             )
00359 
00360             if(visualizationMode()) visualize(img,
00361               model.Name + " located!", true);
00362           )
00363           IF_DEBUG_MODE
00364           (
00365            // extra developer debug output: dump the scene image converted to BGR
00366            cv::Mat img;
00367            cv::cvtColor(data.img, img, CV_GRAY2BGR);
00368            string filename = m_debug_prefix + "marta.png";
00369            cv::imwrite(filename, img);
00371           )
00372           // convert the pose from the camera orientation to the robot orientation
00373           cv::Mat cTo_robot;
00374           changeOrientation(cTo, cTo_robot);
00375 
00376           // fill DetectedObject data
00377           convertPose(cTo_robot, detection.pose);
00378 
00379           // fill the detection with the inlier model points: 3D coordinates in the
00380           // scene (via cTo_robot), 2D projections, model-frame coordinates and octaves
00381           convert3DPoints(face, inlier_model_indices, cTo_robot, detection.points3d);
00382           project2DPoints(face, cTo, data.params.camera, inlier_model_indices, detection.points2d);
00383           get3DModelPoints(face, inlier_model_indices, detection.points3d_model);
00384           getPointsOctave(face, inlier_model_indices, detection.octave);
00385 
00391           IF_DEBUG_MODE
00392           (
00393             //DUtilsCV::IO::print(cTo_robot, "--> rTo");
00394             fstream f((m_debug_prefix + "rTo.txt").c_str(), ios::out);
00395             DUtilsCV::IO::print(cTo_robot, "rTo", f);
00396             f.close();
00397             //debug::saveTscene(cTo_robot, detection.points3d,
00398             //  m_debug_prefix + "scene.wrl");
00399             std::fstream file((m_debug_prefix + "data_marta.txt").c_str(), std::fstream::out);
00400             file<<"keypointsScene=[";
00401             for(unsigned int i = 0; i < inlier_scene_indices.size(); ++i)
00402             {
00403                const cv::KeyPoint &kp = scene_surfset.keys[  inlier_scene_indices[i] ];
00404                float s = ((9.0f/1.2f) * kp.size/10.0f) / 3.0f;
00405                float o = kp.angle * 3.14159265 / 360.f;
00406                file<<s<<" "<<o<<" "<<kp.pt.x<<" "<<kp.pt.y<<" "<<endl;
00407             }
00408             file<<"];\n";
00409 
00410             file<<"points3dM=[\n";
00411             for(unsigned int i = 0; i < inlier_model_indices.size(); ++i)
00412             {
00413               const PLYPoint &p3d = face.plypoints[ inlier_model_indices[i] ];
00414               file<<p3d.x << " " <<p3d.y <<" "<<p3d.z<<endl;
00415             }
00416             file<<"];\n";
00417             file<<"T2=[" <<cTo<<"];\n";
00418 
00419             file.close();
00420           )
00421 
00446           return true;
00447         } // if Z positive
00448       } // if inliers >= min_inliers
00449     } // if points not aligned
00452 
00453   } // if ! sHm empty
00454 
00455   return false;
00456 }
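// --- Illustrative sketch (editorial addition, not part of the original file) --
// A minimal standalone program showing the RANSAC inlier-filtering pattern used
// in detectWithHomography(): estimate a homography from synthetic
// correspondences and count the entries of the status mask that are non-zero.
// It assumes the same OpenCV 2.x calib3d API (CV_RANSAC and the mask overload
// of cv::findHomography) used by this file.
#include <opencv2/core/core.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <vector>
#include <cstdio>

int main()
{
  // synthetic correspondences: a pure translation of (+5, +3), plus one outlier
  std::vector<cv::Point2f> model_pts, scene_pts;
  for(int i = 0; i < 20; ++i)
  {
    model_pts.push_back(cv::Point2f(10.f * i, 7.f * (i % 5)));
    scene_pts.push_back(cv::Point2f(10.f * i + 5.f, 7.f * (i % 5) + 3.f));
  }
  scene_pts[0] = cv::Point2f(500.f, 500.f);      // gross outlier

  std::vector<unsigned char> status;
  cv::Mat H = cv::findHomography(model_pts, scene_pts, status, CV_RANSAC, 3.);

  int inliers = 0;
  for(unsigned int i = 0; i < status.size(); ++i)
    if(status[i] != 0) ++inliers;                // same test as in the code above

  std::printf("homography %s, inliers: %d of %d\n",
    H.empty() ? "not found" : "found", inliers, (int)status.size());
  return 0;
}
// ------------------------------------------------------------------------------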
00457 
00458 // ---------------------------------------------------------------------------
00459 

