ORMatchingModule.cpp
00001 /*******************************************************************************
00002  *  ORMatchingModule.cpp
00003  *
00004  *  (C) 2006 AG Aktives Sehen <agas@uni-koblenz.de>
00005  *           Universitaet Koblenz-Landau
00006  *
00007  *******************************************************************************/
00008 
00009 #include "ORMatchingModule.h"
00010 #include "ORControlModule.h"
00011 
00012 #include <sstream>
00013 #include <fstream>
00014 #include <limits>
00015 #include <algorithm>
00016 
00017 #include <cv_bridge/cv_bridge.h>
00018 #include <sensor_msgs/image_encodings.h>
00019 #include <sensor_msgs/Image.h>
00020 
00021 #include "Architecture/Config/Config.h"
00022 #include "Architecture/Singleton/Clock.h"
00023 
00024 #include "Workers/Math/Math.h"
00025 
00026 #include "ObjectRecognition/CvHomography.h"
00027 #include "ObjectRecognition/HoughClusterer.h"
00028 #include "ObjectRecognition/MatchHelper.h"
00029 #include "ObjectRecognition/NNRMatcher.h"
00030 #include "ObjectRecognition/SimpleHoughClusterer.h"
00031 #include "ObjectRecognition/FLANNMatcher.h"
00032 
00033 #include "KeyPointExtraction/DefaultExtractor.h"
00034 
00035 #include <ros/package.h>
00036 #include <or_msgs/OrMatchResult.h>
00037 #include <or_msgs/OrObjectNames.h>
00038 
00039 #define THIS ORMatchingModule
00040 
00041 using namespace std;
00042 
00043 THIS::THIS(ros::NodeHandle *nh, std::string inputTopic)
00044 {
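    // the stage-1 and stage-2 matcher implementations are chosen via the config keys read below:
    // stage 1 selects nearest-neighbour-ratio (NNRMatcher) or FLANN matching,
    // stage 2 selects one of the Hough clustering variants (see matchStage1/matchStage2)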
00045     m_Stage1Matcher = ORMatchingModule::Stage1MatcherT( Config::getInstance()->getInt( "ObjectRecognition.iStage1Matcher" ) );
00046     m_Stage2Matcher = ORMatchingModule::Stage2MatcherT( Config::getInstance()->getInt( "ObjectRecognition.iStage2Matcher" ) );
00047     m_ImagesRequested = 0;
00048     m_FlannMatcher = 0;
00049 
00050     m_Extractor = DefaultExtractor::createInstance();
00051     ROS_INFO_STREAM( "Selected feature extractor: " << m_Extractor->getName() << std::endl << std::endl << m_Extractor->getDescription()  );
00052 
00053     // subscribe to messages
00054     m_OrCommandSubscriber = nh->subscribe<or_msgs::OrCommand>("/or/commands", 100, &ORMatchingModule::callbackOrCommand, this);
00055     m_ExtractKeyPointsSubscriber = nh->subscribe<or_msgs::ExtractKeyPoints>("/or/extract", 100, &ORMatchingModule::callbackExtractKeyPoints, this);
00056 
00057     // subscribe to image input topic
00058     m_ImageSubscriber = nh->subscribe<sensor_msgs::Image>(inputTopic, 10, &ORMatchingModule::callbackImage, this);
00059 
00060     // advertise messages
00061     m_ORMatchResultPublisher = nh->advertise<or_msgs::OrMatchResult>("or/match_result", 100);
00062     m_ORObjectNamesPublisher = nh->advertise<or_msgs::OrObjectNames>("or/obj_names", 100);
00063 }
00064 
00065 THIS::~THIS()
00066 {
00067     ROS_INFO_STREAM( "Mean processing time: " << Math::mean( m_ProcessingTimes ) << "ms" );
00068     delete m_Extractor;
00069     if(m_FlannMatcher)
00070         delete m_FlannMatcher;
00071 }
00072 
00073 void THIS::callbackOrCommand( const or_msgs::OrCommand::ConstPtr& msg )
00074 {
00075     switch( msg->command )
00076     {
00077     case ORControlModule::UnloadObject:
00078         removeObjectProperties( msg->string_value );
00079         break;
00080 
00081     default:
00082         break;
00083     }
00084 }
00085 
00086 
00087 void THIS::callbackExtractKeyPoints( const or_msgs::ExtractKeyPoints::ConstPtr& msg )
00088 {
00089     //m_SourceId = (ImageSources::SourceId) msg->img_source; // TODO check for correct image source
00090 
00091     m_BoundingBoxes.clear();
00092     for( unsigned i = 0; i < msg->bounding_boxes.size(); i++)
00093     {
00094         Box2D<double> box(msg->bounding_boxes.at(i).minX, msg->bounding_boxes.at(i).minY,
00095                           msg->bounding_boxes.at(i).maxX, msg->bounding_boxes.at(i).maxY);
00096         m_BoundingBoxes.push_back(box);
00097     }
00098     m_ImagesRequested++;
00099 }
00100 
00101 void THIS::callbackImage( const sensor_msgs::Image::ConstPtr& message )
00102 {
00103     if ( m_ImagesRequested > 0 )
00104     {
00105         m_ImagesRequested--;
00106         processImageMessage(message);
00107     }
00108 }
00109 
00110 void THIS::processImageMessage( const sensor_msgs::Image::ConstPtr& message )
00111 {
00112         try
00113         {
00114             cv_bridge::CvImagePtr color_img_ptr;
00115             cv_bridge::CvImagePtr gray_img_ptr(new cv_bridge::CvImage);
00116 
00117             try
00118             {
00119                 color_img_ptr = cv_bridge::toCvCopy(message);
00120                 cvtColor( color_img_ptr->image, gray_img_ptr->image, CV_BGR2GRAY );
00121             }
00122             catch (const cv_bridge::Exception& error)
00123             {
00124                 ROS_ERROR_STREAM("Error converting Image message to OpenCV image: " << error.what());
00125                 return;
00126             }
00127             ROS_INFO_STREAM( "analyzing gray image" );
00128             m_ImageWidth = gray_img_ptr->image.cols;
00129             m_ImageHeight = gray_img_ptr->image.rows;
00130 
00131             //convert bounding boxes from normalized image coordinates (x and y in [-1,1], y pointing up) to pixel coordinates
00132             std::vector< Box2D<int> > boundingBoxes( m_BoundingBoxes.size() );
00133             for ( unsigned i=0; i<m_BoundingBoxes.size(); i++ )
00134             {
00135                 boundingBoxes[i].setMinX( ( m_BoundingBoxes[i].minX() *  0.5 + 0.5 ) * m_ImageWidth );
00136                 boundingBoxes[i].setMaxX( ( m_BoundingBoxes[i].maxX() *  0.5 + 0.5 ) * m_ImageWidth );
00137                 boundingBoxes[i].setMinY( ( m_BoundingBoxes[i].maxY() * -0.5 + 0.5 ) * m_ImageHeight );
00138                 boundingBoxes[i].setMaxY( ( m_BoundingBoxes[i].minY() * -0.5 + 0.5 ) * m_ImageHeight );
00139                 ROS_INFO_STREAM( "Using bounding box: min = " << boundingBoxes[i].minX() << ", " << boundingBoxes[i].minY() <<  " max = " << boundingBoxes[i].maxX() << ", " << boundingBoxes[i].maxY() );
00140             }
00141 
00142 //            // TODO only for testing: create a "test-mode" demo in a similar way
00143 //            cv::namedWindow( "Display window", CV_WINDOW_AUTOSIZE );// Create a window for display.
00144 //            cv::imshow( "Display window", gray_img_ptr->image );   // Show our image inside it.
00145 //            cv::waitKey(0);
00146 
00147             processImages( &gray_img_ptr->image, message->header.seq, boundingBoxes );
00148         }
00149         catch ( const char* exception_msg )
00150         {
00151             std::ostringstream stream;
00152             stream << "Caught exception: " << std::endl << exception_msg;
00153             ROS_ERROR_STREAM ( stream.str() );
00154             throw exception_msg;
00155         }
00156 }
00157 
00158 
00159 void THIS::processImages( cv::Mat *image, int seqNum, std::vector< Box2D<int> > boundingBoxes )
00160 {
00161     std::vector< KeyPoint > rawKeyPoints; // extracted once for the whole image, then filtered per bounding box
00162 
00163     int startTime = Clock::getInstance()->getTimestamp();
00164 
00165     m_Extractor->setImage( *image );
00166     m_Extractor->getKeyPoints( rawKeyPoints );
00167 
00168     ROS_INFO_STREAM ( "Found " << rawKeyPoints.size() << " keypoints in " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00169 
00170     processKeyPoints( image, &rawKeyPoints, boundingBoxes, seqNum );
00171 
00172     double deltaT = Clock::getInstance()->getTimestamp() - startTime;
00173     m_ProcessingTimes.push_back( deltaT );
00174     // ROS_INFO_STREAM( "Processing time: " + Tracer::toString(deltaT) + "ms" ); // TODO
00175 }
00176 
00177 
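// For each bounding box, every loaded object is matched against the keypoints that fall inside
// the box; the grayscale image, the raw keypoints, all match results and the bounding boxes are
// then packed into a single OrMatchResult message and published.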
00178 void THIS::processKeyPoints(  cv::Mat *image, std::vector< KeyPoint > *rawKeyPoints, std::vector< Box2D<int> > boundingBoxes, unsigned seqNum )
00179 {
00180     int startTime = Clock::getInstance()->getTimestamp();
00181 
00182     if( boundingBoxes.size() == 0 )
00183     {
00184         boundingBoxes.push_back( Box2D<int>(0,0,image->cols,image->rows) );
00185     }
00186 
00187     std::vector<MatchResult*> matchResults;
00188 
00189     if (rawKeyPoints->size() == 0) {
00190         ROS_ERROR_STREAM("No raw keypoints");
00191     } else {
00192         ROS_INFO_STREAM("Got " << rawKeyPoints->size() << " raw keypoints.");
00193     }
00194     //loop over bounding boxes
00195     for ( unsigned b=0; b<boundingBoxes.size(); b++ )
00196     {
00197         std::vector< KeyPoint > keyPoints;
00198         keyPoints.reserve( rawKeyPoints->size() );
00199 
00200         //maps indices of bounding box keypoints to (raw) scene keypoints
00201         std::vector< unsigned > keyPointIndexMap;
00202         keyPointIndexMap.reserve( rawKeyPoints->size() );
00203 
00204         //get keypoints for this bb
00205         for ( unsigned k=0; k<rawKeyPoints->size(); k++ )
00206         {
00207             if ( boundingBoxes[b].contains( (*rawKeyPoints)[k].x, (*rawKeyPoints)[k].y ) )
00208             {
00209                 //keyPoint is in one bounding box -> save and go to next kp
00210                 keyPoints.push_back( (*rawKeyPoints)[k] );
00211                 keyPointIndexMap.push_back( k );
00212             }
00213         }
00214 
00215         if (keyPoints.size() == 0)
00216             ROS_ERROR_STREAM("No keypoints for bounding box " << b);
00217 
00218         //rebuild the FLANN index over the keypoints of the current bounding box
00219         if(m_Stage1Matcher==Flann)
00220         {
00221             delete m_FlannMatcher;
00222             m_FlannMatcher = new FLANNMatcher();
00223             m_FlannMatcher->createIndex(&keyPoints);
00224         }
00225 
00226         //match all objects
00227         for ( unsigned i=0; i<m_ObjectList.size(); i++ )
00228         {
00229             MatchResult* matchResult=new MatchResult;
00230             if ( matchObject( &keyPoints, m_ObjectList[i], *matchResult ) )
00231             {
00232                 ROS_INFO_STREAM( "Detected object '" << m_ObjectList[i].getName() << "' in bounding box # " << b );
00233                 matchResult->boundingBoxIndex = b;
00234                 matchResult->keyPointIndexMap = keyPointIndexMap;
00235                 matchResults.push_back( matchResult );
00236             }
00237             else
00238             {
00239                 delete matchResult;
00240             }
00241         }
00242     }
00243 
00244     ROS_INFO_STREAM ( "Matching took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00245 
00246     // TODO - clean up creation of ROS message for ORMatchResult
00247 
00248     // prepare ROS messages for publishing
00249     // create image message
00250     cv_bridge::CvImage result_img;
00251     result_img.image = *image;
00252     result_img.encoding = sensor_msgs::image_encodings::MONO8; // the working image is the grayscale scene image
00253     sensor_msgs::Image image_msg;
00254     image_msg = *(result_img.toImageMsg());
00255 
00256     // create KeyPoints message
00257     std::vector<or_msgs::KeyPoint> key_points;
00258     or_msgs::KeyPoint kp;
00259     for( unsigned i = 0; i < rawKeyPoints->size(); i++)
00260     {
00261         kp.x = rawKeyPoints->at(i).x;
00262         kp.y = rawKeyPoints->at(i).y;
00263         kp.scale = rawKeyPoints->at(i).scale;
00264         kp.strength = rawKeyPoints->at(i).strength;
00265         kp.orientation = rawKeyPoints->at(i).orientation;
00266         kp.sign = rawKeyPoints->at(i).sign;
00267         kp.feature_vector = rawKeyPoints->at(i).featureVector;
00268         kp.vector_limits = rawKeyPoints->at(i).vectorLimits;
00269         key_points.push_back(kp);
00270     }
00271 
00272     // create MatchResult message
00273     std::vector<or_msgs::MatchResult> match_results;
00274     for( unsigned i = 0; i < matchResults.size(); i++)
00275     {
00276         or_msgs::MatchResult mr; // fresh message per result so repeated fields do not accumulate across iterations
00277         mr.object_name = matchResults.at(i)->objectName;
00278         mr.object_type = matchResults.at(i)->objectType;
00279 
00280         cv_bridge::CvImage result;
00281         //result.image = adapt_matchresult_image.asIplImage();
00282         result.image = *image;
00283         result.encoding = sensor_msgs::image_encodings::MONO8; // grayscale scene image
00284         mr.image = *(result.toImageMsg());
00285         mr.image_index = matchResults.at(i)->imageIndex;
00286         mr.image_name = matchResults.at(i)->imageName;
00287 
00288         or_msgs::Point2D point_outline;
00289         for( unsigned outline_index = 0; outline_index < matchResults.at(i)->outline.size(); outline_index++ )
00290         {
00291             point_outline.x = matchResults.at(i)->outline.at(outline_index).x();
00292             point_outline.y = matchResults.at(i)->outline.at(outline_index).y();
00293             mr.outline.push_back(point_outline);
00294         }
00295 
00296         or_msgs::Point2D point_bbox;
00297         for( unsigned bb_index = 0; bb_index < matchResults.at(i)->bBox.size(); bb_index++ )
00298         {
00299             point_bbox.x = matchResults.at(i)->bBox.at(bb_index).x();
00300             point_bbox.y = matchResults.at(i)->bBox.at(bb_index).y();
00301             mr.outline.push_back(point_bbox);
00302         }
00303 
00304         mr.center.x = matchResults.at(i)->center.x();
00305         mr.center.y = matchResults.at(i)->center.y();
00306         mr.bounding_box_index = matchResults.at(i)->boundingBoxIndex;
00307 
00308         for( unsigned kpim_index = 0; kpim_index < matchResults.at(i)->keyPointIndexMap.size(); kpim_index++ )
00309         {
00310             mr.key_point_index_map.push_back(  matchResults.at(i)->keyPointIndexMap.at(kpim_index) );
00311         }
00312 
00313         or_msgs::KeyPoint obj_kp;
00314         for( unsigned kp_index = 0; kp_index < matchResults.at(i)->objectKeyPoints.size(); kp_index++)
00315         {
00316             obj_kp.x = matchResults.at(i)->objectKeyPoints.at(kp_index).x;
00317             obj_kp.y = matchResults.at(i)->objectKeyPoints.at(kp_index).y;
00318             obj_kp.scale = matchResults.at(i)->objectKeyPoints.at(kp_index).scale;
00319             obj_kp.strength = matchResults.at(i)->objectKeyPoints.at(kp_index).strength;
00320             obj_kp.orientation = matchResults.at(i)->objectKeyPoints.at(kp_index).orientation;
00321             obj_kp.sign = matchResults.at(i)->objectKeyPoints.at(kp_index).sign;
00322             obj_kp.feature_vector = matchResults.at(i)->objectKeyPoints.at(kp_index).featureVector;
00323             obj_kp.vector_limits = matchResults.at(i)->objectKeyPoints.at(kp_index).vectorLimits;
00324             mr.object_key_points.push_back(obj_kp);
00325         }
00326 
00327         or_msgs::KeyPointMatch kp_match_1;
00328         std::list<KeyPointMatch>::iterator kp_match_iter_1;
00329         for( kp_match_iter_1 = matchResults.at(i)->stage1Matches.begin(); kp_match_iter_1 != matchResults.at(i)->stage1Matches.end(); kp_match_iter_1++)
00330         {
00331             kp_match_1.index1 = kp_match_iter_1->index1;
00332             kp_match_1.index2 = kp_match_iter_1->index2;
00333             kp_match_1.distance = kp_match_iter_1->distance;
00334             kp_match_1.turn_angle = kp_match_iter_1->turnAngle;
00335             kp_match_1.scale_quotient = kp_match_iter_1->scaleQuotient;
00336             mr.stage1_matches.push_back( kp_match_1 );
00337         }
00338 
00339 
00340         for( unsigned st2m_index = 0; st2m_index < matchResults.at(i)->stage2Matches.size(); st2m_index++)
00341         {
00342             or_msgs::KeyPointMatchArray kp_match_2_arr; // one match array per stage-2 hypothesis
00343             or_msgs::KeyPointMatch kp_match_2;
00344             std::list<KeyPointMatch>::iterator kp_match_iter_2;
00345             for( kp_match_iter_2 = matchResults.at(i)->stage2Matches.at(st2m_index).begin(); kp_match_iter_2 != matchResults.at(i)->stage2Matches.at(st2m_index).end(); kp_match_iter_2++)
00346             {
00347                 kp_match_2.index1 = kp_match_iter_2->index1;
00348                 kp_match_2.index2 = kp_match_iter_2->index2;
00349                 kp_match_2.distance = kp_match_iter_2->distance;
00350                 kp_match_2.turn_angle = kp_match_iter_2->turnAngle;
00351                 kp_match_2.scale_quotient = kp_match_iter_2->scaleQuotient;
00352                 kp_match_2_arr.key_point_match_array.push_back(kp_match_2);
00353             }
00354             mr.stage2_matches.push_back(kp_match_2_arr);
00355         }
00356 
00357         or_msgs::KeyPointMatch kp_match_3;
00358         std::list<KeyPointMatch>::iterator kp_match_iter_3;
00359         for( kp_match_iter_3 = matchResults.at(i)->stage3Matches.begin(); kp_match_iter_3 != matchResults.at(i)->stage3Matches.end(); kp_match_iter_3++)
00360         {
00361             kp_match_3.index1 = kp_match_iter_3->index1;
00362             kp_match_3.index2 = kp_match_iter_3->index2;
00363             kp_match_3.distance = kp_match_iter_3->distance;
00364             kp_match_3.turn_angle = kp_match_iter_3->turnAngle;
00365             kp_match_3.scale_quotient = kp_match_iter_3->scaleQuotient;
00366             mr.stage3_matches.push_back( kp_match_3 );
00367         }
00368 
00369         or_msgs::KeyPoint scene_kp;
00370         for( unsigned scene_kp_index = 0; scene_kp_index < matchResults.at(i)->sceneKeyPointsWithinOutline.size(); scene_kp_index++)
00371         {
00372             scene_kp.x = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).x;
00373             scene_kp.y = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).y;
00374             scene_kp.scale = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).scale;
00375             scene_kp.strength = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).strength;
00376             scene_kp.orientation = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).orientation;
00377             scene_kp.sign = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).sign;
00378             scene_kp.feature_vector = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).featureVector;
00379             scene_kp.vector_limits = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).vectorLimits;
00380             mr.scene_key_points_within_outline.push_back(scene_kp);
00381         }
00382 
00383         for( unsigned hom_index = 0; hom_index < 9; hom_index++)
00384         {
00385             mr.homography.at(hom_index) = matchResults.at(i)->homography.m_HomMat[hom_index];
00386         }
00387 
00388         match_results.push_back(mr);
00389         delete matchResults.at(i); // free the MatchResult allocated in the matching loop above
00390     }
00391     // create bounding box message
00392     std::vector<or_msgs::BoundingBox2D> bboxes;
00393     or_msgs::BoundingBox2D bbox;
00394     for( unsigned bb_index = 0; bb_index < boundingBoxes.size(); bb_index++)
00395     {
00396         bbox.minX = boundingBoxes.at(bb_index).minX();
00397         bbox.minY = boundingBoxes.at(bb_index).minY();
00398         bbox.maxX = boundingBoxes.at(bb_index).maxX();
00399         bbox.maxY = boundingBoxes.at(bb_index).maxY();
00400         bboxes.push_back(bbox);
00401     }
00402 
00403 
00404     // create OrMatchResult message containing the results of the object recognition
00405     or_msgs::OrMatchResult or_match_result_msg;
00406     or_match_result_msg.image = image_msg;
00407     or_match_result_msg.key_points = key_points;
00408     or_match_result_msg.match_results = match_results;
00409     or_match_result_msg.bounding_boxes = bboxes;
00410     or_match_result_msg.seq_num = seqNum;
00411     m_ORMatchResultPublisher.publish(or_match_result_msg);
00412 }
00413 
00414 
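// matchObject runs the three-stage pipeline for a single object:
//   stage 1 - nearest-neighbour descriptor matching (NNRMatcher or FLANNMatcher, depending on config)
//   stage 2 - grouping of stage-1 matches into consistent hypotheses (Hough clustering)
//   stage 3 - homography estimation and geometric verification per hypothesis
// The object image with the most verified (stage-3) matches is reported in matchResult.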
00415 bool THIS::matchObject( std::vector< KeyPoint >* sceneKeyPoints, ObjectProperties& objProperties, MatchResult& matchResult )
00416 {
00417     matchResult.objectName = objProperties.getName();
00418     matchResult.objectType = objProperties.getType();
00419 
00420     std::vector< ImagePropertiesCV* > objImageProperties = objProperties.getImageProperties();
00421 
00422     std::ostringstream stream;
00423     std::ostringstream stream2;
00424 
00425     int minMatches = Config::getInt( "ObjectRecognition.iMinMatchedKeyPoints" );
00426     if ( minMatches < 4 ) { minMatches = 4; }
00427 
00428     bool matchFound = false;
00429     int maxMatches = 0;
00430 
00431     stream2 << "Image:  #matches stage 1 / stage 2 / stage 3 / ratio / success" << std::endl;
00432 
00433     //loop over all object images
00434     for ( unsigned i=0; i < objImageProperties.size(); i++ )
00435     {
00436         std::list< KeyPointMatch > stage1Matches;
00437 
00438         if(m_Stage1Matcher==Flann)
00439         {
00440             stage1Matches = matchStage1Flan( objImageProperties[i] );
00441         }
00442         else
00443         {
00444             stage1Matches = matchStage1( sceneKeyPoints, objImageProperties[i] );
00445         }
00446 
00447         std::vector< std::list< KeyPointMatch> > stage2Matches = matchStage2( sceneKeyPoints, objImageProperties[i], stage1Matches );
00448 
00449         //Number of stage2 matches: 0 if the vector is empty, otherwise the size of the first entry (entries are sorted by size in descending order)
00450         int numMatches2 = stage2Matches.size() > 0 ? stage2Matches[0].size() : 0;
00451 
00452         Homography homography;
00453         int numMatches3 = 0;
00454         bool success = false;
00455         std::list< KeyPointMatch > stage3Matches;
00456         std::vector<KeyPoint> sceneKeyPointsWithinOutline;
00457 
00458         stage3Matches = matchStage3( sceneKeyPoints, objImageProperties[i], stage2Matches, homography );
00459         numMatches3 = stage3Matches.size();
00460         //Compute probability of recognition result
00461         sceneKeyPointsWithinOutline = getSceneKeyPointsWithinOutline(sceneKeyPoints, homography, stage3Matches);
00462 
00463         double ratio = 0;
00464 
00465         if(sceneKeyPointsWithinOutline.size()==0)
00466         {
00467             ratio = 0;
00468         }
00469         else
00470         {
00471             int numObjFeatures = objImageProperties[i]->getKeyPoints()->size();
00472             int numSceneFeatures = sceneKeyPointsWithinOutline.size();
00473             //edit(dg): use smaller number of features for comparison, in case the object and scene image differ very much in size
00474             int numFeaturesMin = numObjFeatures < numSceneFeatures ? numObjFeatures : numSceneFeatures;
00475             ratio = (double) numMatches3/numFeaturesMin;
00476         }
00477 
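        // the ratio relates the number of verified matches to the smaller of the two feature sets
        // (object image keypoints vs. scene keypoints projected into the object outline) and serves
        // as a rough confidence value; it is compared against fMinMatchPercentage further below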
00478         if(ratio>1)ratio=1; //happens if there are more scene keypoints in bounding box than in real object
00479 
00480 
00481         //at least fMinMatchPercentage of the available features inside the object's bounding box must have been matched
00482         if ((numMatches3 >= minMatches) && ratio >= Config::getFloat("ObjectRecognition.fMinMatchPercentage"))
00483         {
00484             //      cout << Config::getFloat("ObjectRecognition.fMinMatchPercentage");
00485             success = true;
00486         }
00487 
00488         stream2 << i << ": " << stage1Matches.size() << " / " << numMatches2 << " / " << numMatches3;
00489 
00490         stream2 << " / " <<  double(int(ratio*100))/100.0;
00491 
00492         //     stream2 << "Ratio: " <<  ratio << " (numMatches3/sceneKeyPointsWithinOutline) = " << numMatches3 << "/" << sceneKeyPointsWithinOutline.size() << endl;
00493 
00494         if (success)
00495         {
00496             stream2 << " X";
00497             if ( numMatches3 > maxMatches )
00498             {
00499                 matchResult.imageIndex = i;
00500                 matchResult.imageName = objImageProperties[i]->getName();
00501                 matchResult.stage1Matches = stage1Matches;
00502                 matchResult.stage2Matches = stage2Matches;
00503                 matchResult.stage3Matches = stage3Matches;
00504                 matchResult.sceneKeyPointsWithinOutline = sceneKeyPointsWithinOutline;
00505                 matchResult.homography = homography;
00506                 maxMatches = numMatches3;
00507                 matchFound = true;
00508             }
00509         }
00510         stream2 << endl;
00511     }
00512 
00513     if ( matchFound )
00514     {
00515         matchResult.outline = *( objImageProperties[ matchResult.imageIndex ]->getOutline() );
00516         matchResult.center = objImageProperties[ matchResult.imageIndex ]->getCenter();
00517 
00518         matchResult.image = objImageProperties[ matchResult.imageIndex ]->getMaskedImageUV();
00519 
00520         matchResult.bBox = objImageProperties[ matchResult.imageIndex ]->getBoundingBox();
00521         matchResult.objectKeyPoints = *( objImageProperties[ matchResult.imageIndex ]->getKeyPoints() );
00522         stream2 << "Detected " << objProperties.getType() << ": " << objProperties.getName() << " (image " << matchResult.imageIndex << " " << matchResult.imageName  << ")";
00523         stream2 << "\nHomography:\n" << matchResult.homography.toString();
00524     }
00525 
00526     //Write result to "log/orResult.txt"
00527     std::fstream filestr;
00528     filestr.open("log/orResult.txt", ios::out | ios::app); // create the file if missing and append
00529 
00530     //FIXME
00531     string fileName;// = inbox<ImageM> ( MessageTypes::IMAGE_M )->getFileName();
00532 
00533     if ( matchFound )
00534     {
00535         double tmpRatio = 0;
00536         if(matchResult.sceneKeyPointsWithinOutline.size()==0)
00537         {
00538             tmpRatio = 0;
00539         }
00540         else
00541         {
00542             tmpRatio = (double) matchResult.stage3Matches.size()/matchResult.sceneKeyPointsWithinOutline.size();
00543         }
00544 
00545         if(tmpRatio>1)tmpRatio=1; //happens if there are more scene keypoints in bounding box than in real object
00546         filestr << fileName << " -> detected: " << matchResult.objectName << " (image " << matchResult.imageIndex << " " << matchResult.imageName << " #" << matchResult.objectKeyPoints.size() << "# ) *" << matchResult.stage3Matches.size() << "* with ratio "<< tmpRatio << "\n";
00547     }
00548     else
00549     {
00550         filestr << fileName << " -> failed\n";
00551     }
00552     filestr.close();
00553 
00554     ROS_INFO_STREAM( stream2.str() );
00555     ROS_DEBUG_STREAM( stream.str() );
00556 
00557     return matchFound;
00558 }
00559 
00560 
00561 
00562 std::list< KeyPointMatch > THIS::matchStage1( vector< KeyPoint > *sceneKeyPoints, ImagePropertiesCV *objImageProperties )
00563 {
00564     int startTime = Clock::getInstance()->getTimestamp();
00565 
00566     vector< KeyPoint >* objectImageKeyPoints=objImageProperties->getKeyPoints();
00567 
00568     float maxNNR = Config::getFloat( "ObjectRecognition.NNRMatching.fMaxNearestNeighbourRatio" );
00569     NNRMatcher nnrMatcher( sceneKeyPoints, objectImageKeyPoints );
00570     nnrMatcher.match( maxNNR );
00571 
00572     ROS_INFO_STREAM ( "Nearest-Neighbor Matching of " << sceneKeyPoints->size() << " vs " << objectImageKeyPoints->size() << " keypoints took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00573 
00574     ROS_DEBUG_STREAM( nnrMatcher.getLog() );
00575     return nnrMatcher.getMatches();
00576 }
00577 
00578 
00579 
00580 std::list< KeyPointMatch > THIS::matchStage1Flan( ImagePropertiesCV *objImageProperties )
00581 {
00582     int startTime = Clock::getInstance()->getTimestamp();
00583 
00584     vector< KeyPoint >* objectImageKeyPoints=objImageProperties->getKeyPoints();
00585 
00586     float maxNNR = Config::getFloat( "ObjectRecognition.NNRMatching.fMaxNearestNeighbourRatio" );
00587 
00588     m_FlannMatcher->match(objectImageKeyPoints,maxNNR);
00589 
00590     ROS_INFO_STREAM ( "Nearest-Neighbor Matching with FLANN matched " << m_FlannMatcher->getNumMatches() << " of " << objectImageKeyPoints->size() << " keypoints in " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00591 
00592     ROS_INFO_STREAM( m_FlannMatcher->getLog() );
00593     return m_FlannMatcher->getMatches();
00594 }
00595 
00596 
00597 
00598 bool sizeComp( const std::list< KeyPointMatch>& list1, const std::list< KeyPointMatch>& list2 )
00599 {
00600     return list1.size() > list2.size();
00601 }
00602 
00603 
00604 
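// Stage 2: group the stage-1 matches into geometrically consistent hypotheses.
// SimpleHoughClustering filters a single match set by orientation, scale and position,
// while HoughClustering fills an accumulator and can return several ranked hypotheses.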
00605 std::vector< std::list< KeyPointMatch> > THIS::matchStage2( vector< KeyPoint > *sceneKeyPoints, ImagePropertiesCV *objImageProperties, std::list< KeyPointMatch > &stage1Matches )
00606 {
00607     int startTime = Clock::getInstance()->getTimestamp();
00608 
00609     vector< KeyPoint >* objectImageKeyPoints=objImageProperties->getKeyPoints();
00610 
00611     MatchHelper::calcScaleQuotients( sceneKeyPoints, objectImageKeyPoints, stage1Matches );
00612     MatchHelper::calcTurnAngles( sceneKeyPoints, objectImageKeyPoints, stage1Matches );
00613 
00614     switch ( m_Stage2Matcher )
00615     {
00616     case SimpleHoughClustering:
00617     {
00618         ROS_DEBUG_STREAM("matchStage2 -> SimpleHoughClustering");
00619 
00620         /* perform Hough Clustering on matches */
00621         SimpleHoughClusterer houghClusterer( sceneKeyPoints, objectImageKeyPoints, stage1Matches );
00622         houghClusterer.eliminateByOrientation();
00623         houghClusterer.eliminateByScale();
00624         houghClusterer.eliminateByPosition( Config::getFloat( "ObjectRecognition.SimpleHoughClustering.fMaxMatchDistance" ) );
00625         ROS_DEBUG_STREAM( houghClusterer.getLog() );
00626         vector< std::list< KeyPointMatch> > matches;
00627         matches.push_back(houghClusterer.getMatches());
00628 
00629         ROS_INFO_STREAM ( "SimpleHoughClustering took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00630         return matches;
00631     }
00632 
00633     case HoughClustering:
00634     {
00635         ROS_DEBUG_STREAM("matchStage2 -> HoughClustering");
00636 
00637         /* perform Hough Clustering on matches */
00638         /*
00639       TRACE_SYSTEMINFO("colorFormat: "<<  inbox<ImageM> ( MessageTypes::IMAGE_M )->getColorFormat());
00640 
00641       int w = inbox<ImageM> ( MessageTypes::IMAGE_M )->getRgbImage()->getWidth();
00642       int h = inbox<ImageM> ( MessageTypes::IMAGE_M )->getRgbImage()->getHeight();
00643 
00644       TRACE_SYSTEMINFO("w h ok");*/
00645 
00646         HoughClusterer houghClusterer( sceneKeyPoints, objImageProperties->getKeyPoints(), objImageProperties->getCenter(), m_ImageWidth, m_ImageHeight );
00647 
00648         houghClusterer.setNNMatches(stage1Matches);
00649 
00650         vector< std::list< KeyPointMatch> > matches = houghClusterer.clusterAccumulator();
00651 
00652         if(Config::getInstance()->getBool( "ObjectRecognition.HoughClustering.bPlot" ))
00653         {
00654             std::string path = ros::package::getPath("or_nodes");
00655 
00656             cv::Mat* guiImageNN = new cv::Mat();
00657             houghClusterer.getImage( *guiImageNN );
00658             cv::imwrite(path + "/images/ORHoughAccumulatorNN.ppm", *guiImageNN);
00659 
00660             cv::Mat* guiImageClustered = new cv::Mat();
00661             houghClusterer.getImage( *guiImageClustered );
00662             cv::imwrite(path + "/images/ORHoughAccumulatorClustered.ppm", *guiImageClustered);
00663 
00664             cv::Mat* guiImageDiff = new cv::Mat();
00665             guiImageDiff->create( guiImageNN->rows, guiImageNN->cols, guiImageNN->type() ); // allocate a matrix of matching size and type
00666 
00667             for ( int y=0; y<guiImageNN->rows; y++ )
00668             {
00669                 for ( int x=0; x<guiImageNN->cols; x++ )
00670                 {
00671                     cv::Vec3b diffVec = guiImageNN->at<cv::Vec3b>(y,x) - guiImageClustered->at<cv::Vec3b>(y,x);
00672                     guiImageDiff->at<cv::Vec3b>(y,x) = diffVec;
00673                 }
00674             }
00675             cv::imwrite(path + "/images/ORHoughAccumulatorDiff.ppm", *guiImageDiff);
00676 
00677             delete guiImageNN;
00678             delete guiImageClustered;
00679             delete guiImageDiff;
00680         }
00681 
00682         ROS_INFO_STREAM ( "HoughClustering took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms, " << matches.size() << " hypotheses found." );
00683         ROS_DEBUG_STREAM( houghClusterer.getLog() );
00684 
00685         std::sort( matches.begin(), matches.end(), sizeComp );
00686 
00687         return matches;
00688     }
00689 
00690     default:
00691     {
00692         std::vector< std::list< KeyPointMatch> > matches;
00693         matches.push_back( stage1Matches );
00694         return matches;
00695     }
00696     }
00697 
00698     return std::vector< std::list< KeyPointMatch> > ();
00699 }
00700 
00701 
00702 std::list< KeyPointMatch > THIS::matchStage3( std::vector< KeyPoint > *sceneKeyPoints,
00703                                               ImagePropertiesCV *objImageProperties,
00704                                               std::vector< std::list< KeyPointMatch> > &stage2Matches,
00705                                               Homography &homography )
00706 {
00707     ROS_DEBUG_STREAM("-------- Homography -----------\n\n");
00708 
00709     if(stage2Matches.size()==0)
00710     {
00711         ROS_DEBUG_STREAM("No KeyPointMatch available for homography");
00712         return std::list< KeyPointMatch>();
00713     }
00714 
00715     int startTime = Clock::getInstance()->getTimestamp();
00716 
00717     //iterate stage2 matches; remember max homography; check only stage2 matches with more matches than best homography has
00718 
00719     std::list< KeyPointMatch> maxMatches;
00720     Homography maxHomography;
00721 
00722     int maxHomographies=5;
00723     int numHomographies = stage2Matches.size();
00724 
00725     if ( numHomographies > maxHomographies )
00726     {
00727         numHomographies = maxHomographies;
00728     }
00729 
00730     ROS_INFO_STREAM( "Calculating homographies for " << numHomographies << " of " << stage2Matches.size() << " hypotheses." );
00731 
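    // the hypotheses are sorted by match count in descending order, so once a bin has no more
    // matches than the best homography found so far, the remaining bins cannot improve the result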
00732     //Iterate over the best hypotheses (at most maxHomographies bins)
00733     for(int i=0;i<numHomographies;++i)
00734     {
00735 
00736         std::list< KeyPointMatch> stage2MatchList = stage2Matches[i];
00737 
00738         if(stage2MatchList.size()<=maxMatches.size())
00739         {
00740             ROS_DEBUG_STREAM("Stop checking bins -> bin "<< i << " has less or equal entries with " << stage2MatchList.size() << " matches compared to max matches " << maxMatches.size() << ".");
00741             break;
00742         }
00743 
00744         ROS_DEBUG_STREAM("Checking bin "<< i << " with " << stage2MatchList.size() << " matches.");
00745 
00746         /*
00747     if(i>= Config::getInt( "ObjectRecognition.Homography.iMaxBins"))
00748     {
00749       TRACE_INFO("Stop checking bins -> bin "<< i << ", because iMaxBins was reached.");
00750       break;
00751     }*/
00752 
00753         //compute homography for current bin
00754         CvHomography cvHomography( sceneKeyPoints, objImageProperties->getKeyPoints(), stage2MatchList );
00755         if ( cvHomography.computeHomography() )
00756         {
00757             //check if the object's bounding box can be transformed with the homography
00758             vector< Point2D > bBox = objImageProperties->getBoundingBox();
00759             homography = cvHomography.getHomography();
00760             if ( homography.checkValidity( bBox ) )
00761             {
00762                 cvHomography.eliminateBadMatches();
00763                 std::list< KeyPointMatch> homMatches = cvHomography.getMatches();
00764 
00765                 //if best bin is worse than current homography take this bin
00766                 if(maxMatches.size()<homMatches.size())
00767                 {
00768                     ROS_DEBUG_STREAM("-> Homography is better with " << homMatches.size() << " matches instead of " << maxMatches.size() << " matches.");
00769                     maxMatches = homMatches;
00770                     maxHomography = homography;
00771                 }
00772                 else
00773                 {
00774                     ROS_DEBUG_STREAM("-> Homography is worse or equal with " << homMatches.size() << " matches instead of " << maxMatches.size() << " matches.");
00775                 }
00776             }
00777             else
00778             {
00779                 ROS_DEBUG_STREAM("-> Homography is invalid");
00780             }
00781         }
00782         else
00783         {
00784             ROS_DEBUG_STREAM("-> Bin has no homography");
00785         }
00786     }
00787 
00788     homography = maxHomography;
00789 
00790     ROS_INFO_STREAM ( "Homography took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00791     return maxMatches;
00792 }
00793 
00794 vector<KeyPoint> THIS::getSceneKeyPointsWithinOutline(vector< KeyPoint >* sceneKeyPoints, Homography& homography, std::list< KeyPointMatch >& stage3Matches)
00795 {
00796     int startTime = Clock::getInstance()->getTimestamp();
00797 
00798     vector<KeyPoint> sceneKeyPointsWithinOutline;
00799 
00800     if(stage3Matches.size()==0)
00801     {
00802         ROS_DEBUG_STREAM( "Getting scene points within outline without stage3 matches took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00803         return sceneKeyPointsWithinOutline;
00804     }
00805 
00806     // Project sceneKeyPoints with homography to get number of features within object outline (KeyPoint for display, Point2D to compute if valid)
00807     std::vector< std::pair< KeyPoint, Point2D> > projSceneKeyPoints;
00808 
00809     //project sceneKeyPoints with homography
00810     for ( vector< KeyPoint>::iterator match = sceneKeyPoints->begin(); match != sceneKeyPoints->end(); match++ )
00811     {
00812         Point2D point = homography.transform(match->position());
00813         if(point.isValid()) projSceneKeyPoints.push_back(std::pair< KeyPoint, Point2D>(*match,point));
00814     }
00815 
00816     if(projSceneKeyPoints.size()>0)
00817     {
00818         //Project stage3Matches with homography to get bounding box of outline
00819         std::vector<Point2D> projObjKeyPoints;
00820         for ( std::list<KeyPointMatch>::iterator match = stage3Matches.begin(); match != stage3Matches.end(); match++ )
00821         {
00822             Point2D point = homography.transform(sceneKeyPoints->at(match->index1).position());
00823             projObjKeyPoints.push_back(point);
00824         }
00825 
00826         //Get bounding box of projected object outline to check if scene keypoints are within this area
00827 
00828         int xmin = 0;
00829         int ymin = 0;
00830         int xmax = 0;
00831         int ymax = 0;
00832 
00833         if (projObjKeyPoints.size() > 0) {
00834             xmin = projObjKeyPoints.at(0).x();
00835             xmax = projObjKeyPoints.at(0).x();
00836             ymin = projObjKeyPoints.at(0).y();
00837             ymax = projObjKeyPoints.at(0).y();
00838         }
00839 
00840         for (unsigned int j=1;j<projObjKeyPoints.size();j++)
00841         {
00842             if ( projObjKeyPoints.at(j).x() < xmin ) xmin = projObjKeyPoints.at(j).x();
00843             if ( projObjKeyPoints.at(j).x() > xmax ) xmax = projObjKeyPoints.at(j).x();
00844             if ( projObjKeyPoints.at(j).y() < ymin ) ymin = projObjKeyPoints.at(j).y();
00845             if ( projObjKeyPoints.at(j).y() > ymax ) ymax = projObjKeyPoints.at(j).y();
00846         }
00847 
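        // count the projected scene keypoints that fall inside this axis-aligned box;
        // the box serves as a coarse stand-in for the transformed object outline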
00848         for(unsigned int j=0;j<projSceneKeyPoints.size();++j)
00849         {
00850             int posX = projSceneKeyPoints.at(j).second.x();
00851             int posY = projSceneKeyPoints.at(j).second.y();
00852             if(posX>=xmin && posX<=xmax && posY>=ymin && posY<=ymax)
00853             {
00854                 sceneKeyPointsWithinOutline.push_back(projSceneKeyPoints.at(j).first);
00855             }
00856         }
00857     }
00858 
00859     ROS_DEBUG_STREAM( "Getting scene points within outline took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00860 
00861     return sceneKeyPointsWithinOutline;
00862 }
00863 
00864 void THIS::addObjectProperties(ObjectProperties* newProperties)
00865 {
00866     ostringstream stream;
00867     stream << "Adding object: " << endl;
00868     newProperties->printOn( stream );
00869     ROS_INFO_STREAM( stream.str() );
00870 
00871     for ( unsigned i=0; i<m_ObjectList.size(); i++ )
00872     {
00873         if ( m_ObjectList[i].getName() == newProperties->getName() )
00874         {
00875             ROS_WARN_STREAM( "Object " << m_ObjectList[i].getName() << " already loaded. Replacing." );
00876             removeObjectProperties( m_ObjectList[i].getName() );
00877         }
00878     }
00879 
00880     m_ObjectList.push_back( *newProperties );
00881 
00882     sendObjectNames();
00883 }
00884 
00885 
00886 void THIS::removeObjectProperties(std::string name)
00887 {
00888     std::deque<ObjectProperties>::iterator iterProperties;
00889 
00890     for ( iterProperties = m_ObjectList.begin(); iterProperties != m_ObjectList.end(); ++iterProperties )
00891     {
00892         if( name == iterProperties->getName() )
00893         {
00894             ROS_WARN_STREAM( "Unloading object " << name );
00895             m_ObjectList.erase(iterProperties);
00896             sendObjectNames(); // publish the updated object list after unloading
00897             return;
00898         }
00899     }
00900 }
00901 
00902 
00903 void THIS::sendObjectNames()
00904 {
00905     // Return changed objectList to GUI
00906     or_msgs::OrObjectNames obj_names_msg;
00907     std::deque<ObjectProperties>::iterator iter;
00908     for (iter = m_ObjectList.begin(); iter != m_ObjectList.end(); ++iter)
00909     {
00910         obj_names_msg.object_names.push_back(iter->getName());
00911         obj_names_msg.object_types.push_back(iter->getType());
00912     }
00913 
00914     m_ORObjectNamesPublisher.publish( obj_names_msg );
00915 }
00916 
00917 #undef THIS
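
// Usage sketch (editorial addition, not part of the original file): how this module might be
// instantiated inside a ROS node. The node name and the input image topic are assumptions.
//
//   #include <ros/ros.h>
//   #include "ORMatchingModule.h"
//
//   int main( int argc, char** argv )
//   {
//       ros::init( argc, argv, "or_matching" );            // assumed node name
//       ros::NodeHandle nh;
//       ORMatchingModule module( &nh, "/camera/image" );   // assumed image input topic
//       ros::spin();  // handles /or/commands, /or/extract and incoming images via the callbacks above
//       return 0;
//   }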

