ORMatchingModule.cpp
00001 /*******************************************************************************
00002  *  ORMatchingModule.cpp
00003  *
00004  *  (C) 2006 AG Aktives Sehen <agas@uni-koblenz.de>
00005  *           Universitaet Koblenz-Landau
00006  *
00007  *******************************************************************************/
00008 
00009 #include "ORMatchingModule.h"
00010 #include "ORControlModule.h"
00011 
00012 #include <sstream>
00013 #include <fstream>
00014 #include <limits>
00015 #include <algorithm>
00016 
00017 #include <cv_bridge/cv_bridge.h>
00018 #include <sensor_msgs/image_encodings.h>
00019 #include <sensor_msgs/Image.h>
00020 
00021 #include "Architecture/Config/Config.h"
00022 #include "Architecture/Singleton/Clock.h"
00023 
00024 #include "Workers/Math/Math.h"
00025 
00026 #include "ObjectRecognition/CvHomography.h"
00027 #include "ObjectRecognition/HoughClusterer.h"
00028 #include "ObjectRecognition/MatchHelper.h"
00029 #include "ObjectRecognition/NNRMatcher.h"
00030 #include "ObjectRecognition/SimpleHoughClusterer.h"
00031 #include "ObjectRecognition/FLANNMatcher.h"
00032 
00033 #include "KeyPointExtraction/DefaultExtractor.h"
00034 
00035 #include <ros/package.h>
00036 #include <or_msgs/OrMatchResult.h>
00037 #include <or_msgs/OrObjectNames.h>
00038 #include "ros/ros.h"
00039 #include <stdio.h>
00040 
00041 
00042 #define THIS ORMatchingModule
00043 
00044 using namespace std;
00045 
00046 THIS::THIS(ros::NodeHandle *nh, std::string inputTopic)
00047 {
00048     m_Stage1Matcher = ORMatchingModule::Stage1MatcherT( Config::getInstance()->getInt( "ObjectRecognition.iStage1Matcher" ) );
00049     m_Stage2Matcher = ORMatchingModule::Stage2MatcherT( Config::getInstance()->getInt( "ObjectRecognition.iStage2Matcher" ) );
00050     m_ImagesRequested = 0;
00051     m_FlannMatcher = 0;
00052 
00053     m_Extractor = DefaultExtractor::createInstance();
00054     ROS_INFO_STREAM( "Selected feature extractor: " << m_Extractor->getName() << std::endl << std::endl << m_Extractor->getDescription()  );
00055 
00056     // subscribe to messages
00057     m_OrCommandSubscriber = nh->subscribe<or_msgs::OrCommand>("/or/commands", 100, &ORMatchingModule::callbackOrCommand, this);
00058     m_ExtractKeyPointsSubscriber = nh->subscribe<or_msgs::ExtractKeyPoints>("/or/extract", 100, &ORMatchingModule::callbackExtractKeyPoints, this);
00059 
00060     // subscribe to image input topic
00061     m_ImageSubscriber = nh->subscribe<sensor_msgs::Image>(inputTopic, 10, &ORMatchingModule::callbackImage, this);
00062 
00063     // advertise messages
00064     m_ORMatchResultPublisher = nh->advertise<or_msgs::OrMatchResult>("or/match_result", 100);
00065     m_ORObjectNamesPublisher = nh->advertise<or_msgs::OrObjectNames>("or/obj_names", 100);
00066     m_DebugImagePublisherColor = nh->advertise<sensor_msgs::Image>("/or/obj_learn_primary_color", 10);
00067 }
00068 
00069 THIS::~THIS()
00070 {
00071     ROS_INFO_STREAM( "Mean processing time: " << Math::mean( m_ProcessingTimes ) << "ms" );
00072     delete m_Extractor;
00073     if(m_FlannMatcher)
00074         delete m_FlannMatcher;
00075 }
00076 
00077 void THIS::callbackOrCommand( const or_msgs::OrCommand::ConstPtr& msg )
00078 {
00079     switch( msg->command )
00080     {
00081     case ORControlModule::UnloadObject:
00082         removeObjectProperties( msg->string_value );
00083         break;
00084 
00085     default:
00086         break;
00087     }
00088 }
00089 
00090 
00091 void THIS::callbackExtractKeyPoints( const or_msgs::ExtractKeyPoints::ConstPtr& msg )
00092 {
00093     //m_SourceId = (ImageSources::SourceId) msg->img_source; // TODO check for correct image source
00094 
00095     m_BoundingBoxes.clear();
00096     for( unsigned i = 0; i < msg->bounding_boxes.size(); i++)
00097     {
00098         Box2D<double> box(msg->bounding_boxes.at(i).minX, msg->bounding_boxes.at(i).minY,
00099                           msg->bounding_boxes.at(i).maxX, msg->bounding_boxes.at(i).maxY);
00100         m_BoundingBoxes.push_back(box);
00101     }
00102     m_ImagesRequested++;
00103 }
00104 
00105 void THIS::callbackImage( const sensor_msgs::Image::ConstPtr& message )
00106 {
00107     if ( m_ImagesRequested > 0 )
00108     {
00109         m_ImagesRequested--;
00110         processImageMessage(message);
00111     }
00112 }
00113 
00114 void THIS::processImageMessage( const sensor_msgs::Image::ConstPtr& message )
00115 {
00116         try
00117         {
00118             cv_bridge::CvImagePtr color_img_ptr;
00119             color_img_ptr = cv_bridge::toCvCopy(message);
00120             cv_bridge::CvImagePtr gray_img_ptr;
00121             gray_img_ptr = cv_bridge::toCvCopy(message, "mono8");
00122 
00123 //            try
00124 //            {
00125 //                color_img_ptr = cv_bridge::toCvCopy(message);
00126 //                cvtColor( color_img_ptr->image, gray_img_ptr->image, CV_BGR2GRAY );
00127 //            }
00128 //            catch (cv_bridge::Exception error)
00129 //            {
00130 //                ROS_ERROR("Error converting Image message to OpenCV image.");
00131 //            }
00132 
00133             ROS_INFO_STREAM( "analyzing gray image" );
00134             m_ImageWidth = gray_img_ptr->image.cols;
00135             m_ImageHeight = gray_img_ptr->image.rows;
00136 
00137             //convert bboxes from image to pixel coords // TODO this is not needed in the ROS version, remove it and replace with correct bounding box handling
00138             std::vector< Box2D<int> > boundingBoxes( m_BoundingBoxes.size() );
00139             for ( unsigned i=0; i<m_BoundingBoxes.size(); i++ )
00140             {
00141                 boundingBoxes[i].setMinX( ( m_BoundingBoxes[i].minX() *  0.5 + 0.5 ) * m_ImageWidth );
00142                 boundingBoxes[i].setMaxX( ( m_BoundingBoxes[i].maxX() *  0.5 + 0.5 ) * m_ImageWidth );
00143                 boundingBoxes[i].setMinY( ( m_BoundingBoxes[i].maxY() * -0.5 + 0.5 ) * m_ImageHeight );
00144                 boundingBoxes[i].setMaxY( ( m_BoundingBoxes[i].minY() * -0.5 + 0.5 ) * m_ImageHeight );
00145                 ROS_INFO_STREAM( "Using bounding box: min = " << boundingBoxes[i].minX() << ", " << boundingBoxes[i].minY() <<  " max = " << boundingBoxes[i].maxX() << ", " << boundingBoxes[i].maxY() );
00146             }
00147 
00148 //            // TODO only for testing: create a "test-mode" demo in a similar way
00149 //            cv::namedWindow( "Display window", CV_WINDOW_AUTOSIZE );// Create a window for display.
00150 //            cv::imshow( "Display window", gray_img_ptr->image );   // Show our image inside it.
00151 //            cv::waitKey(0);
00152 
00153                         //cv::namedWindow( "Display window", CV_WINDOW_AUTOSIZE );// Create a window for display.
00154                         //cv::imshow( "Display window", gray_img_ptr->image );   // Show our image inside it.
00155                         //cv::waitKey(0);
00156                         //cv::imwrite("/home/mk/robbie/homer/experimental/Libraries/object_recognition/inputImage01.png",gray_img_ptr->image);
00157 
00158            processImages ( &gray_img_ptr->image, message->header.seq, boundingBoxes );
00159         }
00160         catch ( const char* exception_msg )
00161         {
00162             std::ostringstream stream;
00163             stream << "Caught exception: " << std::endl << exception_msg;
00164             ROS_ERROR_STREAM ( stream.str() );
00165             throw exception_msg;
00166         }
00167 }
00168 
00169 
00170 void THIS::processImages( cv::Mat *image, int seqNum, std::vector< Box2D<int> > boundingBoxes )
00171 {
00172     std::vector< KeyPoint > *rawKeyPoints = new std::vector< KeyPoint >();
00173 
00174     int startTime = Clock::getInstance()->getTimestamp();
00175 
00176     m_Extractor->setImage( *image );
00177     m_Extractor->getKeyPoints( *rawKeyPoints );
00178 
00179     ROS_INFO_STREAM ( "Found " << rawKeyPoints->size() << " keypoints in " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00180 
00181     processKeyPoints( image, rawKeyPoints, boundingBoxes, seqNum );
00182     delete rawKeyPoints; // processKeyPoints copies what it needs, so the buffer allocated above can be freed here
00183     //cv::Mat outImage;
00184     //outImage = &image;
00185     //cv::imwrite("/home/mk/robbie/homer/experimental/Libraries/object_recognition/inputImage02.png",(*image));
00186 
00187 
00188     double deltaT = Clock::getInstance()->getTimestamp() - startTime;
00189     m_ProcessingTimes.push_back( deltaT );
00190     // ROS_INFO_STREAM( "Processing time: " + Tracer::toString(deltaT) + "ms" ); // TODO
00191 }
00192 
00193 
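// Matches the extracted keypoints against all loaded objects, once per bounding box, and
// publishes the aggregated result as an or_msgs::OrMatchResult. Keypoints are first filtered
// per bounding box; when FLANN is selected as stage-1 matcher, a fresh index is built over the
// keypoints of each box. Optional debug output (object outlines and names drawn into the image
// and published on /or/obj_learn_primary_color) is controlled via the /OrNodes/debugOutput and
// /OrNodes/debugOutputPath parameters.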
00194 void THIS::processKeyPoints(  cv::Mat *image, std::vector< KeyPoint > *rawKeyPoints, std::vector< Box2D<int> > boundingBoxes, unsigned seqNum )
00195 {
00196     int startTime = Clock::getInstance()->getTimestamp();
00197 
00198     if( boundingBoxes.size() == 0 )
00199     {
00200         boundingBoxes.push_back( Box2D<int>(0,0,image->cols,image->rows) );
00201     }
00202 
00203     std::vector<MatchResult*> matchResults;
00204 
00205     if (rawKeyPoints->size() == 0) {
00206         ROS_ERROR_STREAM("No raw keypoints");
00207     } else {
00208         ROS_INFO_STREAM("Got " << rawKeyPoints->size() << " raw keypoints.");
00209     }
00210     //loop over bounding boxes
00211     for ( unsigned b=0; b<boundingBoxes.size(); b++ )
00212     {
00213         std::vector< KeyPoint > keyPoints;
00214         keyPoints.reserve( rawKeyPoints->size() );
00215 
00216         //maps indices of bounding box keypoints to (raw) scene keypoints
00217         std::vector< unsigned > keyPointIndexMap;
00218         keyPointIndexMap.reserve( rawKeyPoints->size() );
00219 
00220         //get keypoints for this bb
00221         for ( unsigned k=0; k<rawKeyPoints->size(); k++ )
00222         {
00223             if ( boundingBoxes[b].contains( (*rawKeyPoints)[k].x, (*rawKeyPoints)[k].y ) )
00224             {
00225                 //keyPoint is in one bounding box -> save and go to next kp
00226                 keyPoints.push_back( (*rawKeyPoints)[k] );
00227                 keyPointIndexMap.push_back( k );
00228             }
00229         }
00230 
00231         if (keyPoints.size() == 0)
00232             ROS_ERROR_STREAM("No keypoints for bounding box " << b);
00233 
00234         //recreate lookup
00235         if(m_Stage1Matcher==Flann)
00236         {
00237             delete m_FlannMatcher;
00238             m_FlannMatcher = new FLANNMatcher();
00239             m_FlannMatcher->createIndex(&keyPoints);
00240         }
00241 
00242         //match all objects
00243         for ( unsigned i=0; i<m_ObjectList.size(); i++ )
00244         {
00245             MatchResult* matchResult=new MatchResult;
00246             if ( matchObject( &keyPoints, m_ObjectList[i], *matchResult ) )
00247             {
00248                 ROS_INFO_STREAM( "Detected object '" << m_ObjectList[i].getName() << "' in bounding box # " << b );
00249                 matchResult->boundingBoxIndex = b;
00250                 matchResult->keyPointIndexMap = keyPointIndexMap;
00251                 matchResults.push_back( matchResult );
00252             }
00253             else
00254             {
00255                 delete matchResult;
00256             }
00257         }
00258     }
00259 
00260     ROS_INFO_STREAM ( "Matching took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00261 
00262     // TODO - clean up creation of ROS message for ORMatchResult
00263 
00264     // prepare ROS messages for publishing
00265     // create image message
00266     cv_bridge::CvImage result_img;
00267     result_img.image = *image;
00268 
00269     sensor_msgs::Image image_msg;
00270     image_msg = *(result_img.toImageMsg());
00271 
00272     // create KeyPoints message
00273     std::vector<or_msgs::KeyPoint> key_points;
00274     or_msgs::KeyPoint kp;
00275     for( unsigned i = 0; i < rawKeyPoints->size(); i++)
00276     {
00277         kp.x = rawKeyPoints->at(i).x;
00278         kp.y = rawKeyPoints->at(i).y;
00279         kp.scale = rawKeyPoints->at(i).scale;
00280         kp.strength = rawKeyPoints->at(i).strength;
00281         kp.orientation = rawKeyPoints->at(i).orientation;
00282         kp.sign = rawKeyPoints->at(i).sign;
00283         kp.feature_vector = rawKeyPoints->at(i).featureVector;
00284         kp.vector_limits = rawKeyPoints->at(i).vectorLimits;
00285         key_points.push_back(kp);
00286     }
00287 
00288     // create MatchResult message
00289     std::vector<or_msgs::MatchResult> match_results;
00290     for( unsigned i = 0; i < matchResults.size(); i++)
00291     {
00292         or_msgs::MatchResult mr; // fresh message per result, so the push_back fields below do not accumulate across results
00293         mr.object_name = matchResults.at(i)->objectName;
00294         mr.object_type = matchResults.at(i)->objectType;
00295 
00296         cv_bridge::CvImage result;
00297         //result.image = adapt_matchresult_image.asIplImage();
00298         result.image = *image;
00299 
00300         mr.image = *(result.toImageMsg());
00301         mr.image_index = matchResults.at(i)->imageIndex;
00302         mr.image_name = matchResults.at(i)->imageName;
00303 
00304         or_msgs::Point2D point_outline;
00305         for( unsigned outline_index = 0; outline_index < matchResults.at(i)->outline.size(); outline_index++ )
00306         {
00307             point_outline.x = matchResults.at(i)->outline.at(outline_index).x();
00308             point_outline.y = matchResults.at(i)->outline.at(outline_index).y();
00309             mr.outline.push_back(point_outline);
00310         }
00311 
00312         or_msgs::Point2D point_bbox;
00313         for( unsigned bb_index = 0; bb_index < matchResults.at(i)->bBox.size(); bb_index++ )
00314         {
00315             point_bbox.x = matchResults.at(i)->bBox.at(bb_index).x();
00316             point_bbox.y = matchResults.at(i)->bBox.at(bb_index).y();
00317             mr.outline.push_back(point_bbox);
00318         }
00319 
00320         mr.center.x = matchResults.at(i)->center.x();
00321         mr.center.y = matchResults.at(i)->center.y();
00322         mr.bounding_box_index = matchResults.at(i)->boundingBoxIndex;
00323 
00324         for( unsigned kpim_index = 0; kpim_index < matchResults.at(i)->keyPointIndexMap.size(); kpim_index++ )
00325         {
00326             mr.key_point_index_map.push_back(  matchResults.at(i)->keyPointIndexMap.at(kpim_index) );
00327         }
00328 
00329         or_msgs::KeyPoint obj_kp;
00330         for( unsigned kp_index = 0; kp_index < matchResults.at(i)->objectKeyPoints.size(); kp_index++)
00331         {
00332             obj_kp.x = matchResults.at(i)->objectKeyPoints.at(kp_index).x;
00333             obj_kp.y = matchResults.at(i)->objectKeyPoints.at(kp_index).y;
00334             obj_kp.scale = matchResults.at(i)->objectKeyPoints.at(kp_index).scale;
00335             obj_kp.strength = matchResults.at(i)->objectKeyPoints.at(kp_index).strength;
00336             obj_kp.orientation = matchResults.at(i)->objectKeyPoints.at(kp_index).orientation;
00337             obj_kp.sign = matchResults.at(i)->objectKeyPoints.at(kp_index).sign;
00338             obj_kp.feature_vector = matchResults.at(i)->objectKeyPoints.at(kp_index).featureVector;
00339             obj_kp.vector_limits = matchResults.at(i)->objectKeyPoints.at(kp_index).vectorLimits;
00340             mr.object_key_points.push_back(obj_kp);
00341         }
00342 
00343         or_msgs::KeyPointMatch kp_match_1;
00344         std::list<KeyPointMatch>::iterator kp_match_iter_1;
00345         for( kp_match_iter_1 = matchResults.at(i)->stage1Matches.begin(); kp_match_iter_1 != matchResults.at(i)->stage1Matches.end(); kp_match_iter_1++)
00346         {
00347             kp_match_1.index1 = kp_match_iter_1->index1;
00348             kp_match_1.index2 = kp_match_iter_1->index2;
00349             kp_match_1.distance = kp_match_iter_1->distance;
00350             kp_match_1.turn_angle = kp_match_iter_1->turnAngle;
00351             kp_match_1.scale_quotient = kp_match_iter_1->scaleQuotient;
00352             mr.stage1_matches.push_back( kp_match_1 );
00353         }
00354 
00355 
00356         for( unsigned st2m_index = 0; st2m_index < matchResults.at(i)->stage2Matches.size(); st2m_index++)
00357         {
00358             or_msgs::KeyPointMatchArray kp_match_2_arr; // declared per hypothesis so matches do not accumulate across stage-2 entries
00359             or_msgs::KeyPointMatch kp_match_2;
00360             std::list<KeyPointMatch>::iterator kp_match_iter_2;
00361             for( kp_match_iter_2 = matchResults.at(i)->stage2Matches.at(st2m_index).begin(); kp_match_iter_2 != matchResults.at(i)->stage2Matches.at(st2m_index).end(); kp_match_iter_2++)
00362             {
00363                 kp_match_2.index1 = kp_match_iter_2->index1;
00364                 kp_match_2.index2 = kp_match_iter_2->index2;
00365                 kp_match_2.distance = kp_match_iter_2->distance;
00366                 kp_match_2.turn_angle = kp_match_iter_2->turnAngle;
00367                 kp_match_2.scale_quotient = kp_match_iter_2->scaleQuotient;
00368                 kp_match_2_arr.key_point_match_array.push_back(kp_match_2);
00369             }
00370             mr.stage2_matches.push_back(kp_match_2_arr);
00371         }
00372 
00373         or_msgs::KeyPointMatch kp_match_3;
00374         std::list<KeyPointMatch>::iterator kp_match_iter_3;
00375         for( kp_match_iter_3 = matchResults.at(i)->stage3Matches.begin(); kp_match_iter_3 != matchResults.at(i)->stage3Matches.end(); kp_match_iter_3++)
00376         {
00377             kp_match_3.index1 = kp_match_iter_3->index1;
00378             kp_match_3.index2 = kp_match_iter_3->index2;
00379             kp_match_3.distance = kp_match_iter_3->distance;
00380             kp_match_3.turn_angle = kp_match_iter_3->turnAngle;
00381             kp_match_3.scale_quotient = kp_match_iter_3->scaleQuotient;
00382             mr.stage3_matches.push_back( kp_match_3 );
00383         }
00384 
00385         or_msgs::KeyPoint scene_kp;
00386         for( unsigned scene_kp_index = 0; scene_kp_index < matchResults.at(i)->sceneKeyPointsWithinOutline.size(); scene_kp_index++)
00387         {
00388             scene_kp.x = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).x;
00389             scene_kp.y = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).y;
00390             scene_kp.scale = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).scale;
00391             scene_kp.strength = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).strength;
00392             scene_kp.orientation = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).orientation;
00393             scene_kp.sign = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).sign;
00394             scene_kp.feature_vector = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).featureVector;
00395             scene_kp.vector_limits = matchResults.at(i)->sceneKeyPointsWithinOutline.at(scene_kp_index).vectorLimits;
00396             mr.scene_key_points_within_outline.push_back(scene_kp);
00397         }
00398 
00399         for( unsigned hom_index = 0; hom_index < 9; hom_index++)
00400         {
00401             mr.homography.at(hom_index) = matchResults.at(i)->homography.m_HomMat[hom_index];
00402         }
00403 
00404         match_results.push_back(mr);
00405     }
00406 
00407     // create boundingbox message
00408     std::vector<or_msgs::BoundingBox2D> bboxes;
00409     or_msgs::BoundingBox2D bbox;
00410     for( unsigned bb_index = 0; bb_index < boundingBoxes.size(); bb_index++)
00411     {
00412         bbox.minX = boundingBoxes.at(bb_index).minX();
00413         bbox.minY = boundingBoxes.at(bb_index).minY();
00414         bbox.maxX = boundingBoxes.at(bb_index).maxX();
00415         bbox.maxY = boundingBoxes.at(bb_index).maxY();
00416         bboxes.push_back(bbox);
00417     }
00418 
00419 
00420     // create OrMatchResult message containing the results of the object recognition
00421     or_msgs::OrMatchResult or_match_result_msg;
00422     or_match_result_msg.image = image_msg;
00423     or_match_result_msg.key_points = key_points;
00424     or_match_result_msg.match_results = match_results;
00425     or_match_result_msg.bounding_boxes = bboxes;
00426     or_match_result_msg.seq_num = seqNum;
00427     m_ORMatchResultPublisher.publish(or_match_result_msg);
00428 
00429 
00430     // TODO clean up debug output
00431     if(ros::param::has("/OrNodes/debugOutput"))
00432     {
00433         std::string debugOutput;
00434         ros::param::get("/OrNodes/debugOutput", debugOutput);
00435         std::string strTrue = "/true";
00436 
00437         if(debugOutput==strTrue){
00438 
00439             //prepare data
00440 
00441             cv::Mat img_gray = image->clone();
00442             cv::Mat img;
00443             cvtColor( img_gray, img, CV_GRAY2BGR );
00444 
00445             IplImage imgIplPutText (img);
00446             CvFont font; //font for the cvPutTextFunktion
00447             cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX, 2.5, 2.5, 0, 8, CV_AA);
00448 
00449             std::stringstream mainInfoText;
00450             mainInfoText<<"Result: "<<matchResults.size()<<" ";
00451             //TRACE_INFO("matchResults: "<<matchResults.size());
00452 
00453             //Iterate over all objects that were detected in the image
00454             for(unsigned int i=0; i<matchResults.size();i++){
00455                 MatchResult* mr = matchResults.at(i);
00456                 std::string objectName = mr->objectName;
00457                 Point2D imgCenter = mr->center;
00458                 Point2D objectCenter = mr->homography.transform ( imgCenter );
00459                 std::cout<<"found: "<<objectName<<" at: x: "<<objectCenter.x()<<" y: "<<objectCenter.y()<<"\n";
00460 
00461                 //draw Bounding Box of each Object
00462                 vector<Point2D> bBoxTrans;
00463                 mr->homography.transform( mr->bBox, bBoxTrans );
00464 
00465                 for(unsigned int t=0;t<bBoxTrans.size()-1;t++){
00466                     Point2D p1_robbie = bBoxTrans.at(t);
00467                     Point2D p2_robbie = bBoxTrans.at(t+1);
00468                     cv::Point p1_opencv (p1_robbie.x(),p1_robbie.y());
00469                     cv::Point p2_opencv (p2_robbie.x(),p2_robbie.y());
00470                     cv::line(img,p1_opencv,p2_opencv,cvScalar(0, 0, 255, 0),2);
00471                 }
00472 
00473                 //draw the object name at the object's center
00474                 std::stringstream strText;
00475                 strText<<"." <<objectName;
00476                 cvPutText(&imgIplPutText,strText.str().c_str(), cvPoint(objectCenter.x(), objectCenter.y()), &font, cvScalar(255, 255, 0, 0));
00477 
00478             }
00479             //draw the info text at the top of the image
00480             cvPutText(&imgIplPutText,mainInfoText.str().c_str(), cvPoint(100, 100), &font, cvScalar(255, 255, 0, 0));
00481 
00482             //write the output image to disk
00483             if(ros::param::has("/OrNodes/debugOutputPath")){
00484                 std::string debugOutputPath;
00485                 ros::param::get("/OrNodes/debugOutputPath", debugOutputPath);
00486                 std::string strDefault = "/default";
00487                 if(debugOutputPath==strDefault){
00488                     debugOutputPath = ros::package::getPath("or_nodes");
00489                 }
00490                 std::stringstream imageOutputFileName;
00491                 imageOutputFileName<<debugOutputPath<<"/debug_matchResult.jpg";
00492                 ROS_ERROR_STREAM("write output image to " + imageOutputFileName.str());
00493                 //cv::imwrite(imageOutputFileName.str().c_str(),img);
00494 
00495                 //if you want to see it in a new window:
00496 //                cv::namedWindow("match result", CV_WINDOW_AUTOSIZE);
00497 //                cv::imshow("match result",img);
00498 //                cv::waitKey(0);
00499 
00500                 // publish as debug_rgb
00501                 cv_bridge::CvImagePtr result_image(new cv_bridge::CvImage(std_msgs::Header(), "bgr8", img));
00502                 sensor_msgs::Image image_msg;
00503                 image_msg = *(result_image->toImageMsg());
00504                 m_DebugImagePublisherColor.publish(image_msg);
00505 
00506 
00507             } // end debugpath
00508 
00509         } // end debug Output == true
00510 
00511     } //end debug Output
00512 
00513 } //end processKeyPoint
00514 
00515 
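// Matches one object against the scene keypoints using the three-stage pipeline below:
// stage 1 does descriptor matching (nearest-neighbour ratio test via NNRMatcher or FLANN),
// stage 2 clusters the matches into pose-consistent hypotheses, and stage 3 verifies the best
// hypotheses with a homography. A detection is accepted when at least
// ObjectRecognition.iMinMatchedKeyPoints (clamped to a minimum of 4) stage-3 matches remain and
// the match ratio reaches ObjectRecognition.fMinMatchPercentage; the best-scoring object image
// is written into matchResult.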
00516 bool THIS::matchObject( std::vector< KeyPoint >* sceneKeyPoints, ObjectProperties& objProperties, MatchResult& matchResult )
00517 {
00518     matchResult.objectName = objProperties.getName();
00519     matchResult.objectType = objProperties.getType();
00520 
00521     std::vector< ImagePropertiesCV* > objImageProperties = objProperties.getImageProperties();
00522 
00523     std::ostringstream stream;
00524     std::ostringstream stream2;
00525 
00526     int minMatches = Config::getInt( "ObjectRecognition.iMinMatchedKeyPoints" );
00527     if ( minMatches < 4 ) { minMatches = 4; }
00528 
00529     bool matchFound = false;
00530     int maxMatches = 0;
00531 
00532     stream2 << "Image:  #matches stage 1 / stage 2 / stage 3 / ratio / success" << std::endl;
00533 
00534     //loop over all object images
00535     for ( unsigned i=0; i < objImageProperties.size(); i++ )
00536     {
00537         std::list< KeyPointMatch > stage1Matches;
00538 
00539         if(m_Stage1Matcher==Flann)
00540         {
00541             stage1Matches = matchStage1Flan( objImageProperties[i] );
00542         }
00543         else
00544         {
00545             stage1Matches = matchStage1( sceneKeyPoints, objImageProperties[i] );
00546         }
00547 
00548         std::vector< std::list< KeyPointMatch> > stage2Matches = matchStage2( sceneKeyPoints, objImageProperties[i], stage1Matches );
00549 
00550         //Number of stage-2 matches is 0 if the vector is empty, otherwise the size of its first entry (lists are sorted in descending order)
00551         int numMatches2 = stage2Matches.size() > 0 ? stage2Matches[0].size() : 0;
00552 
00553         Homography homography;
00554         int numMatches3 = 0;
00555         bool success = false;
00556         std::list< KeyPointMatch > stage3Matches;
00557         std::vector<KeyPoint> sceneKeyPointsWithinOutline;
00558 
00559         stage3Matches = matchStage3( sceneKeyPoints, objImageProperties[i], stage2Matches, homography );
00560         numMatches3 = stage3Matches.size();
00561         //Compute probability of recognition result
00562         sceneKeyPointsWithinOutline = getSceneKeyPointsWithinOutline(sceneKeyPoints, homography, stage3Matches);
00563 
00564         double ratio = 0;
00565 
00566         if(sceneKeyPointsWithinOutline.size()==0)
00567         {
00568             ratio = 0;
00569         }
00570         else
00571         {
00572             int numObjFeatures = objImageProperties[i]->getKeyPoints()->size();
00573             int numSceneFeatures = sceneKeyPointsWithinOutline.size();
00574             //edit(dg): use the smaller feature count for comparison, in case the object and scene images differ greatly in size
00575             int numFeaturesMin = numObjFeatures < numSceneFeatures ? numObjFeatures : numSceneFeatures;
00576             ratio = (double) numMatches3/numFeaturesMin;
00577         }
00578 
00579         if(ratio>1)ratio=1; //happens if there are more scene keypoints in bounding box than in real object
00580 
00581 
00582         //fMinMatchPercentage of all available features in bounding box of object should have been matched
00583         if ((numMatches3 >= minMatches) && ratio >= Config::getFloat("ObjectRecognition.fMinMatchPercentage"))
00584         {
00585             //      cout << Config::getFloat("ObjectRecognition.fMinMatchPercentage");
00586             success = true;
00587         }
00588 
00589         stream2 << i << ": " << stage1Matches.size() << " / " << numMatches2 << " / " << numMatches3;
00590 
00591         stream2 << " / " <<  double(int(ratio*100))/100.0;
00592 
00593         //     stream2 << "Ratio: " <<  ratio << " (numMatches3/sceneKeyPointsWithinOutline) = " << numMatches3 << "/" << sceneKeyPointsWithinOutline.size() << endl;
00594 
00595         if (success)
00596         {
00597             stream2 << " X";
00598             if ( numMatches3 > maxMatches )
00599             {
00600                 matchResult.imageIndex = i;
00601                 matchResult.imageName = objImageProperties[i]->getName();
00602                 matchResult.stage1Matches = stage1Matches;
00603                 matchResult.stage2Matches = stage2Matches;
00604                 matchResult.stage3Matches = stage3Matches;
00605                 matchResult.sceneKeyPointsWithinOutline = sceneKeyPointsWithinOutline;
00606                 matchResult.homography = homography;
00607                 maxMatches = numMatches3;
00608                 matchFound = true;
00609             }
00610         }
00611         stream2 << endl;
00612     }
00613 
00614     if ( matchFound )
00615     {
00616         matchResult.outline = *( objImageProperties[ matchResult.imageIndex ]->getOutline() );
00617         matchResult.center = objImageProperties[ matchResult.imageIndex ]->getCenter();
00618 
00619         matchResult.image = objImageProperties[ matchResult.imageIndex ]->getMaskedImageUV();
00620 
00621         matchResult.bBox = objImageProperties[ matchResult.imageIndex ]->getBoundingBox();
00622         matchResult.objectKeyPoints = *( objImageProperties[ matchResult.imageIndex ]->getKeyPoints() );
00623         stream2 << "Detected " << objProperties.getType() << ": " << objProperties.getName() << " (image " << matchResult.imageIndex << " " << matchResult.imageName  << ")";
00624         stream2 << "\nHomography:\n" << matchResult.homography.toString();
00625     }
00626 
00627     //Write result to "log/orResult.txt"
00628     std::fstream filestr;
00629     filestr.open("log/orResult.txt", ios::in | ios::out | ios::ate);
00630 
00631     //FIXME
00632     string fileName;// = inbox<ImageM> ( MessageTypes::IMAGE_M )->getFileName();
00633 
00634     if ( matchFound )
00635     {
00636         double tmpRatio = 0;
00637         if(matchResult.sceneKeyPointsWithinOutline.size()==0)
00638         {
00639             tmpRatio = 0;
00640         }
00641         else
00642         {
00643             tmpRatio = (double) matchResult.stage3Matches.size()/matchResult.sceneKeyPointsWithinOutline.size();
00644         }
00645 
00646         if(tmpRatio>1)tmpRatio=1; //happens if there are more scene keypoints in bounding box than in real object
00647         filestr << fileName << " -> detected: " << matchResult.objectName << " (image " << matchResult.imageIndex << " " << matchResult.imageName << " #" << matchResult.objectKeyPoints.size() << "# ) *" << matchResult.stage3Matches.size() << "* with ratio "<< tmpRatio << "\n";
00648     }
00649     else
00650     {
00651         filestr << fileName << " -> failed\n";
00652     }
00653     filestr.close();
00654 
00655     ROS_INFO_STREAM( stream2.str() );
00656     ROS_DEBUG_STREAM( stream.str() );
00657 
00658     return matchFound;
00659 }
00660 
00661 
00662 
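// Stage 1: nearest-neighbour-ratio matching of the scene keypoints against the keypoints of one
// object image, thresholded by ObjectRecognition.NNRMatching.fMaxNearestNeighbourRatio.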
00663 std::list< KeyPointMatch > THIS::matchStage1( vector< KeyPoint > *sceneKeyPoints, ImagePropertiesCV *objImageProperties )
00664 {
00665     int startTime = Clock::getInstance()->getTimestamp();
00666 
00667     vector< KeyPoint >* objectImageKeyPoints=objImageProperties->getKeyPoints();
00668 
00669     float maxNNR = Config::getFloat( "ObjectRecognition.NNRMatching.fMaxNearestNeighbourRatio" );
00670     NNRMatcher nnrMatcher( sceneKeyPoints, objectImageKeyPoints );
00671     nnrMatcher.match( maxNNR );
00672 
00673     ROS_INFO_STREAM ( "Nearest-Neighbor Matching of " << sceneKeyPoints->size() << " vs " << objectImageKeyPoints->size() << " keypoints took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00674 
00675     ROS_DEBUG_STREAM( nnrMatcher.getLog() );
00676     return nnrMatcher.getMatches();
00677 }
00678 
00679 
00680 
00681 std::list< KeyPointMatch > THIS::matchStage1Flan( ImagePropertiesCV *objImageProperties )
00682 {
00683     int startTime = Clock::getInstance()->getTimestamp();
00684 
00685     vector< KeyPoint >* objectImageKeyPoints=objImageProperties->getKeyPoints();
00686 
00687     float maxNNR = Config::getFloat( "ObjectRecognition.NNRMatching.fMaxNearestNeighbourRatio" );
00688 
00689     m_FlannMatcher->match(objectImageKeyPoints,maxNNR);
00690 
00691     ROS_INFO_STREAM ( "Nearest-Neighbor Matching with FLANN found " << m_FlannMatcher->getNumMatches() << " of " << objectImageKeyPoints->size() << " keypoints and took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00692 
00693     ROS_INFO_STREAM( m_FlannMatcher->getLog() );
00694     return m_FlannMatcher->getMatches();
00695 }
00696 
00697 
00698 
00699 bool sizeComp( std::list< KeyPointMatch> list1, std::list< KeyPointMatch> list2 )
00700 {
00701     return list1.size() > list2.size();
00702 }
00703 
00704 
00705 
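// Stage 2: groups the stage-1 matches into hypotheses that agree on rotation, scale and position.
// Depending on the configured matcher this uses SimpleHoughClusterer (iterative elimination by
// orientation, scale and position), HoughClusterer (accumulator-based clustering, optionally
// writing accumulator images to disk), or no clustering at all, in which case the stage-1 matches
// are passed through as a single hypothesis.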
00706 std::vector< std::list< KeyPointMatch> > THIS::matchStage2( vector< KeyPoint > *sceneKeyPoints, ImagePropertiesCV *objImageProperties, std::list< KeyPointMatch > &stage1Matches )
00707 {
00708     int startTime = Clock::getInstance()->getTimestamp();
00709 
00710     vector< KeyPoint >* objectImageKeyPoints=objImageProperties->getKeyPoints();
00711 
00712     MatchHelper::calcScaleQuotients( sceneKeyPoints, objectImageKeyPoints, stage1Matches );
00713     MatchHelper::calcTurnAngles( sceneKeyPoints, objectImageKeyPoints, stage1Matches );
00714 
00715     switch ( m_Stage2Matcher )
00716     {
00717     case SimpleHoughClustering:
00718     {
00719         ROS_DEBUG_STREAM("matchStage2 -> SimpleHoughClustering");
00720 
00721         /* perform Hough Clustering on matches */
00722         SimpleHoughClusterer houghClusterer( sceneKeyPoints, objectImageKeyPoints, stage1Matches );
00723         houghClusterer.eliminateByOrientation();
00724         houghClusterer.eliminateByScale();
00725         houghClusterer.eliminateByPosition( Config::getFloat( "ObjectRecognition.SimpleHoughClustering.fMaxMatchDistance" ) );
00726         ROS_DEBUG_STREAM( houghClusterer.getLog() );
00727         vector< std::list< KeyPointMatch> > matches;
00728         matches.push_back(houghClusterer.getMatches());
00729 
00730         ROS_INFO_STREAM ( "SimpleHoughClustering took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00731         return matches;
00732     }
00733 
00734     case HoughClustering:
00735     {
00736         ROS_DEBUG_STREAM("matchStage2 -> HoughClustering");
00737 
00738         /* perform Hough Clustering on matches */
00739         /*
00740       TRACE_SYSTEMINFO("colorFormat: "<<  inbox<ImageM> ( MessageTypes::IMAGE_M )->getColorFormat());
00741 
00742       int w = inbox<ImageM> ( MessageTypes::IMAGE_M )->getRgbImage()->getWidth();
00743       int h = inbox<ImageM> ( MessageTypes::IMAGE_M )->getRgbImage()->getHeight();
00744 
00745       TRACE_SYSTEMINFO("w h ok");*/
00746 
00747         HoughClusterer houghClusterer( sceneKeyPoints, objImageProperties->getKeyPoints(), objImageProperties->getCenter(), m_ImageWidth, m_ImageHeight );
00748 
00749         houghClusterer.setNNMatches(stage1Matches);
00750 
00751         vector< std::list< KeyPointMatch> > matches = houghClusterer.clusterAccumulator();
00752 
00753         if(Config::getInstance()->getBool( "ObjectRecognition.HoughClustering.bPlot" ))
00754         {
00755             std::string path = ros::package::getPath("or_nodes");
00756 
00757             cv::Mat* guiImageNN = new cv::Mat();
00758             houghClusterer.getImage( *guiImageNN );
00759             cv::imwrite(path + "/images/ORHoughAccumulatorNN.ppm", *guiImageNN);
00760 
00761             cv::Mat* guiImageClustered = new cv::Mat();
00762             houghClusterer.getImage( *guiImageClustered );
00763             cv::imwrite(path + "/images/ORHoughAccumulatorClustered.ppm", *guiImageClustered);
00764 
00765             cv::Mat* guiImageDiff = new cv::Mat();
00766             guiImageDiff->create(guiImageNN->rows, guiImageNN->cols, guiImageNN->type()); // allocate a matching image so the per-pixel writes below are valid
00767 
00768             for ( int y=0; y<guiImageNN->rows; y++ )
00769             {
00770                 for ( int x=0; x<guiImageNN->cols; x++ )
00771                 {
00772                     cv::Vec3b diffVec = guiImageNN->at<cv::Vec3b>(y,x) - guiImageClustered->at<cv::Vec3b>(y,x);
00773                     guiImageDiff->at<cv::Vec3b>(y,x) = diffVec;
00774                 }
00775             }
00776             cv::imwrite(path + "/images/ORHoughAccumulatorDiff.ppm", *guiImageDiff);
00777 
00778             delete guiImageNN;
00779             delete guiImageClustered;
00780             delete guiImageDiff;
00781         }
00782 
00783         ROS_INFO_STREAM ( "HoughClustering took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms, " << matches.size() << " hypotheses found." );
00784         ROS_DEBUG_STREAM( houghClusterer.getLog() );
00785 
00786         std::sort( matches.begin(), matches.end(), sizeComp );
00787 
00788         return matches;
00789     }
00790 
00791     default:
00792     {
00793         std::vector< std::list< KeyPointMatch> > matches;
00794         matches.push_back( stage1Matches );
00795         return matches;
00796     }
00797     }
00798 
00799     return std::vector< std::list< KeyPointMatch> > ();
00800 }
00801 
00802 
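// Stage 3: estimates a homography for up to five of the largest stage-2 hypotheses and keeps the
// one whose homography passes the bounding-box validity check and retains the most matches after
// bad matches are eliminated.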
00803 std::list< KeyPointMatch > THIS::matchStage3( std::vector< KeyPoint > *sceneKeyPoints,
00804                                               ImagePropertiesCV *objImageProperties,
00805                                               std::vector< std::list< KeyPointMatch> > &stage2Matches,
00806                                               Homography &homography )
00807 {
00808     ROS_DEBUG_STREAM("-------- Homography -----------\n\n");
00809 
00810     if(stage2Matches.size()==0)
00811     {
00812         ROS_DEBUG_STREAM("No KeyPointMatch available for homography");
00813         return std::list< KeyPointMatch>();
00814     }
00815 
00816     int startTime = Clock::getInstance()->getTimestamp();
00817 
00818     //iterate stage2 matches; remember max homography; check only stage2 matches with more matches than best homography has
00819 
00820     std::list< KeyPointMatch> maxMatches;
00821     Homography maxHomography;
00822 
00823     int maxHomographies=5;
00824     int numHomographies = stage2Matches.size();
00825 
00826     if ( numHomographies > maxHomographies )
00827     {
00828         numHomographies = maxHomographies;
00829     }
00830 
00831     ROS_INFO_STREAM( "Calculating homographies for " << numHomographies << " of " << stage2Matches.size() << " hypotheses." );
00832 
00833     //Iterate over all bins
00834     for(int i=0;i<numHomographies;++i)
00835     {
00836 
00837         std::list< KeyPointMatch> stage2MatchList = stage2Matches[i];
00838 
00839         if(stage2MatchList.size()<=maxMatches.size())
00840         {
00841             ROS_DEBUG_STREAM("Stop checking bins -> bin "<< i << " has no more matches (" << stage2MatchList.size() << ") than the current best (" << maxMatches.size() << ").");
00842             break;
00843         }
00844 
00845         ROS_DEBUG_STREAM("Checking bin "<< i << " with " << stage2MatchList.size() << " matches.");
00846 
00847         /*
00848     if(i>= Config::getInt( "ObjectRecognition.Homography.iMaxBins"))
00849     {
00850       TRACE_INFO("Stop checking bins -> bin "<< i << ", because iMaxBins was reached.");
00851       break;
00852     }*/
00853 
00854         //compute homography for current bin
00855         CvHomography cvHomography( sceneKeyPoints, objImageProperties->getKeyPoints(), stage2MatchList );
00856         if ( cvHomography.computeHomography() )
00857         {
00858             //check if the object's bounding box can be transformed with the homography
00859             vector< Point2D > bBox = objImageProperties->getBoundingBox();
00860             homography = cvHomography.getHomography();
00861             if ( homography.checkValidity( bBox ) )
00862             {
00863                 cvHomography.eliminateBadMatches();
00864                 std::list< KeyPointMatch> homMatches = cvHomography.getMatches();
00865 
00866                 //if best bin is worse than current homography take this bin
00867                 if(maxMatches.size()<homMatches.size())
00868                 {
00869                     ROS_DEBUG_STREAM("-> Homography is better with " << homMatches.size() << " matches instead of " << maxMatches.size() << " matches.");
00870                     maxMatches = homMatches;
00871                     maxHomography = homography;
00872                 }
00873                 else
00874                 {
00875                     ROS_DEBUG_STREAM("-> Homography is worse or equal with " << homMatches.size() << " matches instead of " << maxMatches.size() << " matches.");
00876                 }
00877             }
00878             else
00879             {
00880                 ROS_DEBUG_STREAM("-> Homography is invalid");
00881             }
00882         }
00883         else
00884         {
00885             ROS_DEBUG_STREAM("-> Bin has no homography");
00886         }
00887     }
00888 
00889     homography = maxHomography;
00890 
00891     ROS_INFO_STREAM ( "Homography took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00892     return maxMatches;
00893 }
00894 
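// Projects all scene keypoints through the homography and returns those lying inside the
// axis-aligned bounding box of the projected stage-3 matches; matchObject uses the size of this
// set when computing the match ratio.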
00895 vector<KeyPoint> THIS::getSceneKeyPointsWithinOutline(vector< KeyPoint >* sceneKeyPoints, Homography& homography, std::list< KeyPointMatch >& stage3Matches)
00896 {
00897     int startTime = Clock::getInstance()->getTimestamp();
00898 
00899     vector<KeyPoint> sceneKeyPointsWithinOutline;
00900 
00901     if(stage3Matches.size()==0)
00902     {
00903         ROS_DEBUG_STREAM( "Getting scene points within outline without stage3 matches took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00904         return sceneKeyPointsWithinOutline;
00905     }
00906 
00907     // Project sceneKeyPoints with homography to get number of features within object outline (KeyPoint for display, Point2D to compute if valid)
00908     std::vector< std::pair< KeyPoint, Point2D> > projSceneKeyPoints;
00909 
00910     //project sceneKeyPoints with homography
00911     for ( vector< KeyPoint>::iterator match = sceneKeyPoints->begin(); match != sceneKeyPoints->end(); match++ )
00912     {
00913         Point2D point = homography.transform(match->position());
00914         if(point.isValid()) projSceneKeyPoints.push_back(std::pair< KeyPoint, Point2D>(*match,point));
00915     }
00916 
00917     if(projSceneKeyPoints.size()>0)
00918     {
00919         //Project stage3Matches with homography to get bounding box of outline
00920         std::vector<Point2D> projObjKeyPoints;
00921         for ( std::list<KeyPointMatch>::iterator match = stage3Matches.begin(); match != stage3Matches.end(); match++ )
00922         {
00923             Point2D point = homography.transform(sceneKeyPoints->at(match->index1).position());
00924             projObjKeyPoints.push_back(point);
00925         }
00926 
00927         //Get bounding box of projected object outline to check if scene keypoints are within this area
00928 
00929         int xmin = 0;
00930         int ymin = 0;
00931         int xmax = 0;
00932         int ymax = 0;
00933 
00934         if (projObjKeyPoints.size() > 0) {
00935             xmin = projObjKeyPoints.at(0).x();
00936             xmax = projObjKeyPoints.at(0).x();
00937             ymin = projObjKeyPoints.at(0).y();
00938             ymax = projObjKeyPoints.at(0).y();
00939         }
00940 
00941         for (unsigned int j=1;j<projObjKeyPoints.size();j++)
00942         {
00943             if ( projObjKeyPoints.at(j).x() < xmin ) xmin = projObjKeyPoints.at(j).x();
00944             if ( projObjKeyPoints.at(j).x() > xmax ) xmax = projObjKeyPoints.at(j).x();
00945             if ( projObjKeyPoints.at(j).y() < ymin ) ymin = projObjKeyPoints.at(j).y();
00946             if ( projObjKeyPoints.at(j).y() > ymax ) ymax = projObjKeyPoints.at(j).y();
00947         }
00948 
00949         for(unsigned int j=0;j<projSceneKeyPoints.size();++j)
00950         {
00951             int posX = projSceneKeyPoints.at(j).second.x();
00952             int posY = projSceneKeyPoints.at(j).second.y();
00953             if(posX>=xmin && posX<=xmax && posY>=ymin && posY<=ymax)
00954             {
00955                 sceneKeyPointsWithinOutline.push_back(projSceneKeyPoints.at(j).first);
00956             }
00957         }
00958     }
00959 
00960     ROS_DEBUG_STREAM( "Getting scene points within outline took " << ( Clock::getInstance()->getTimestamp() - startTime ) << "ms" );
00961 
00962     return sceneKeyPointsWithinOutline;
00963 }
00964 
00965 void THIS::addObjectProperties(ObjectProperties* newProperties)
00966 {
00967     ostringstream stream;
00968     stream << "Adding object: " << endl;
00969     newProperties->printOn( stream );
00970     ROS_INFO_STREAM( stream.str() );
00971 
00972     for ( unsigned i=0; i<m_ObjectList.size(); i++ )
00973     {
00974         if ( m_ObjectList[i].getName() == newProperties->getName() )
00975         {
00976             ROS_WARN_STREAM( "Object " << m_ObjectList[i].getName() << " already loaded. Replacing." );
00977             removeObjectProperties( m_ObjectList[i].getName() );
00978         }
00979     }
00980 
00981     m_ObjectList.push_back( *newProperties );
00982 
00983     sendObjectNames();
00984 }
00985 
00986 
00987 void THIS::removeObjectProperties(std::string name)
00988 {
00989     std::deque<ObjectProperties>::iterator iterProperties;
00990 
00991     for ( iterProperties = m_ObjectList.begin(); iterProperties != m_ObjectList.end(); ++iterProperties )
00992     {
00993         if( name == iterProperties->getName() )
00994         {
00995             ROS_WARN_STREAM( "Unloading object " << name );
00996             m_ObjectList.erase(iterProperties);
00997             sendObjectNames(); // publish the updated object list after unloading
00998             return;
00999         }
01000     }
01001 }
01002 
01003 
01004 void THIS::sendObjectNames()
01005 {
01006     // Return changed objectList to GUI
01007     or_msgs::OrObjectNames obj_names_msg;
01008     std::deque<ObjectProperties>::iterator iter;
01009     for (iter = m_ObjectList.begin(); iter != m_ObjectList.end(); ++iter)
01010     {
01011         obj_names_msg.object_names.push_back(iter->getName());
01012         obj_names_msg.object_types.push_back(iter->getType());
01013     }
01014 
01015     m_ORObjectNamesPublisher.publish( obj_names_msg );
01016 }
01017 
01018 #undef THIS

