ORLearningModule.cpp
/*******************************************************************************
*  ORLearningModule.cpp
*
*  (C) 2005, 2013 AG Aktives Sehen <agas@uni-koblenz.de>
*                 Universitaet Koblenz-Landau
*
******************************************************************************/

#include "ORLearningModule.h"

#include <sstream>
#include <fstream>

#include <ros/package.h>
#include <sensor_msgs/image_encodings.h>
#include <or_msgs/OrLearningStatus.h>
#include <or_msgs/VectorObject2D.h>
#include <or_msgs/Point2D.h>

#include <opencv2/highgui/highgui.hpp>

#include "Architecture/Config/Config.h"

#include <boost/archive/binary_iarchive.hpp>
#include <boost/archive/binary_oarchive.hpp>
#include <boost/archive/text_iarchive.hpp>
#include <boost/archive/text_oarchive.hpp>

#include "Workers/ImageHelpers/ImageMaskCV.h"
#include "Workers/VectorGraphics/VectorObject2D.h"

#include "ObjectRecognition/ObjectProperties.h"
#include "ObjectRecognition/ImagePropertiesCV.h"

#include "ConnectedComponentAnalyzer/ConnectedComponentAnalyzer.h"


#define THIS ORLearningModule

THIS::THIS(ros::NodeHandle *nh, std::string inputTopic)
{
    m_PathForSaving = Config::getString ( "ObjectRecognition.sDataPath" );

    // histogram parameters
    m_HistogramBinSize = Config::getInt ( "ObjectRecognition.Histogram.iBinSize" );

    m_HistogramMinY = Config::getInt ( "ObjectRecognition.Histogram.iMinY" );
    m_HistogramMaxY = Config::getInt ( "ObjectRecognition.Histogram.iMaxY" );

    m_HistogramClearRange = Config::getFloat ( "ObjectRecognition.Histogram.fHistogramClearRange" );

    m_OpenRadius = 15.0;
    m_BorderSize = 5.0;

    m_BackgroundImageGray = 0;
    m_BackgroundImageColor = 0;
    m_ForegroundImageGray = 0;
    m_ForegroundImageColor = 0;
    m_DifferenceThreshold = 0;
    m_IsolateLargestSegment = false;

    ADD_MACHINE_STATE ( m_ModuleMachine, IDLE );
    ADD_MACHINE_STATE ( m_ModuleMachine, WAITING_FOR_FOREGROUND );
    ADD_MACHINE_STATE ( m_ModuleMachine, WAITING_FOR_BACKGROUND );
    m_ModuleMachine.setName ( "ObjectLearning State" );

    m_ObjectProperties = new ObjectProperties( );

    m_ImageRequested = false;

    // subscribe to topics
    m_ORLearnCommandSubscriber = nh->subscribe<or_msgs::OrLearnCommand>("/or/learn_commands", 10, &ORLearningModule::callbackOrLearnCommand, this);

    // subscribe to image input topic
    m_ImageSubscriber = nh->subscribe<sensor_msgs::Image>(inputTopic, 10, &ORLearningModule::callbackImage, this);

    // advertise topics
    m_OLPrimaryImagePublisher = nh->advertise<or_msgs::OrImage>("or/obj_learn_primary", 10);
    m_OLDebugImagePublisher = nh->advertise<or_msgs::OrImage>("or/debug_image", 10);
    m_ORLearningStatusPublisher = nh->advertise<or_msgs::OrLearningStatus>("or/learning_status", 10);
}
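
// The snippet below is a minimal sketch (not part of the original module) of
// how another node might drive the learning process over ROS. The topic
// "/or/learn_commands" and the fields command / int_value / string_value are
// taken from the subscription set up above; the command constants are the
// ones handled in callbackOrLearnCommand() below. Publisher and variable
// names here are hypothetical.
//
//   ros::NodeHandle nh;
//   ros::Publisher cmd_pub =
//       nh.advertise<or_msgs::OrLearnCommand>( "/or/learn_commands", 10 );
//
//   or_msgs::OrLearnCommand cmd;
//   cmd.command = ORLearningModule::GrabBackgroundImage;  // empty scene
//   cmd_pub.publish( cmd );
//
//   cmd.command = ORLearningModule::GrabForegroundImage;  // scene with object
//   cmd_pub.publish( cmd );
//
//   cmd.command = ORLearningModule::SaveImage;            // keep the isolated view
//   cmd.string_value = "front";
//   cmd_pub.publish( cmd );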


THIS::~THIS()
{
    delete m_BackgroundImageGray;
    delete m_BackgroundImageColor;
    delete m_ForegroundImageGray;
    delete m_ForegroundImageColor;
    delete m_ObjectProperties;
}

void THIS::callbackOrLearnCommand( const or_msgs::OrLearnCommand::ConstPtr& message )
{
    try
    {
        std::ostringstream s;
        switch ( message->command )
        {
        case ORLearningModule::SetDifferenceThreshold:
            m_DifferenceThreshold = message->int_value;
            ROS_INFO_STREAM ( "Setting difference threshold to " << m_DifferenceThreshold );
            if ( m_BackgroundImageGray && m_ForegroundImageGray )
            {
                previewIsolatedImage();
            }
            break;

        case ORLearningModule::SetOpenRadius:
            m_OpenRadius = message->int_value;
            ROS_INFO_STREAM ( "Setting opening radius to " << m_OpenRadius );
            if ( m_BackgroundImageGray && m_ForegroundImageGray )
            {
                previewIsolatedImage();
            }
            break;

        case ORLearningModule::SetBorderSize:
            m_BorderSize = message->int_value;
            ROS_INFO_STREAM ( "Setting border size to " << m_BorderSize );
            if ( m_BackgroundImageGray && m_ForegroundImageGray )
            {
                previewIsolatedImage();
            }
            break;

        case ORLearningModule::SetIsolateLargestSegment:
            if ( message->string_value == "true" )
            {
                ROS_INFO_STREAM ( "Using single largest segment" );
                m_IsolateLargestSegment = true;
            }
            else
            {
                ROS_INFO_STREAM ( "Using all segments" );
                m_IsolateLargestSegment = false;
            }
            if ( m_BackgroundImageGray && m_ForegroundImageGray )
            {
                previewIsolatedImage();
            }
            break;

        case ORLearningModule::GrabBackgroundImage:
        {
            m_ModuleMachine.setState ( WAITING_FOR_BACKGROUND );
            ROS_INFO_STREAM ( "Grabbing background image" );
            m_ImageRequested = true;
            break;
        }
        case ORLearningModule::GrabForegroundImage:
        {
            m_ModuleMachine.setState ( WAITING_FOR_FOREGROUND );
            ROS_INFO_STREAM ( "Grabbing foreground image" );
            m_ImageRequested = true;
            break;
        }
        case ORLearningModule::LoadBackgroundImage:
        {
            m_ModuleMachine.setState ( WAITING_FOR_BACKGROUND );
            std::string fileName = message->string_value;
            ROS_INFO_STREAM("Loading background image: " << fileName);
            //read and publish image
            loadImage(fileName);
            break;
        }

        case ORLearningModule::LoadForegroundImage:
        {
            m_ModuleMachine.setState ( WAITING_FOR_FOREGROUND );
            std::string fileName = message->string_value;
            ROS_INFO_STREAM("Loading foreground image: " << fileName);
            //read and publish image
            loadImage(fileName);
            break;
        }
        case ORLearningModule::DisplayImage:
            ROS_INFO_STREAM ( "Displaying Image #" << message->int_value );
            displayImage ( message->int_value );
            break;

        case ORLearningModule::SaveImage:
        {
            ROS_INFO_STREAM ( "Saving Image '" << message->string_value << "'" );
            saveImage( message->string_value );
            // publish learning status message
            or_msgs::OrLearningStatus learn_state_msg;
            learn_state_msg.image_names = m_ObjectProperties->getImageNames();
            learn_state_msg.object_type = m_ObjectType;
            m_ORLearningStatusPublisher.publish(learn_state_msg);
            break;
        }
        case ORLearningModule::DeleteImage:
        {
            ROS_INFO_STREAM ( "Deleting image #" << message->int_value );
            deleteImage ( message->int_value );
            // publish learning status message
            or_msgs::OrLearningStatus learn_state_msg;
            learn_state_msg.image_names = m_ObjectProperties->getImageNames();
            learn_state_msg.object_type = m_ObjectType;
            m_ORLearningStatusPublisher.publish(learn_state_msg);
            break;
        }
        case ORLearningModule::SetObjectType:
            ROS_INFO_STREAM ( "Setting object type to '" << message->string_value << "'" );
            m_ObjectType = message->string_value;
            break;

        case ORLearningModule::SaveObject:
        {
            ROS_INFO_STREAM ( "Saving object as '" << message->string_value << "'" );
            saveObject ( message->string_value );
            // publish learning status message
            or_msgs::OrLearningStatus learn_state_msg;
            learn_state_msg.image_names = m_ObjectProperties->getImageNames();
            learn_state_msg.object_type = m_ObjectType;
            m_ORLearningStatusPublisher.publish(learn_state_msg);
            break;
        }
        case ORLearningModule::LoadObject:
        {
            ROS_INFO_STREAM ( "Loading object '" << message->string_value << "'" );
            loadObject ( message->string_value );
            // publish learning status message
            or_msgs::OrLearningStatus learn_state_msg;
            learn_state_msg.image_names = m_ObjectProperties->getImageNames();
            learn_state_msg.object_type = m_ObjectType;
            m_ORLearningStatusPublisher.publish(learn_state_msg);
            break;
        }
        } // closes switch
        if ( s.str() != "" )
        {
            ROS_INFO_STREAM ( s.str() );
        }
    }
    catch ( const char* exception_msg )
    {
        std::ostringstream stream;
        stream << "Caught exception: " << std::endl << exception_msg;
        ROS_ERROR_STREAM ( stream.str() );
        throw exception_msg;
    }
}

void THIS::callbackImage( const sensor_msgs::Image::ConstPtr& message )
{
    if(m_ImageRequested)
    {
        m_ImageRequested = false;
        processImageMessage(message);
    }
}

void THIS::processImageMessage( const sensor_msgs::Image::ConstPtr& message )
{
    try
    {
        cv_bridge::CvImagePtr color_img_ptr;
        cv_bridge::CvImagePtr gray_img_ptr(new cv_bridge::CvImage);

        try
        {
            color_img_ptr = cv_bridge::toCvCopy(message);
            cvtColor( color_img_ptr->image, gray_img_ptr->image, CV_BGR2GRAY );
        }
        catch (const cv_bridge::Exception& error)
        {
            ROS_ERROR_STREAM("Error converting Image message to OpenCV image: " << error.what());
            // conversion failed, skip this frame instead of using the empty image pointers
            return;
        }

        switch ( m_ModuleMachine.state() )
        {
        case WAITING_FOR_BACKGROUND:
            setBackground ( gray_img_ptr, color_img_ptr );
            m_ModuleMachine.setState ( IDLE );
            break;
        case WAITING_FOR_FOREGROUND:
            setForeground ( gray_img_ptr, color_img_ptr );
            previewIsolatedImage();
            m_ModuleMachine.setState ( IDLE );
            break;
        default: break;
        }

        or_msgs::OrImage or_image_msg;
        or_image_msg.image_gray = *(gray_img_ptr->toImageMsg());
        or_image_msg.image_color = *(color_img_ptr->toImageMsg());
        or_image_msg.filename = "";
        m_OLDebugImagePublisher.publish(or_image_msg);
    }
    catch ( const char* exception_msg )
    {
        std::ostringstream stream;
        stream << "Caught exception: " << std::endl << exception_msg;
        ROS_ERROR_STREAM ( stream.str() );
        throw exception_msg;
    }
}


void THIS::loadImage(std::string fileName)
{
    cv_bridge::CvImage gray_image, color_image;
    gray_image.image = cv::imread(fileName.c_str(), CV_LOAD_IMAGE_GRAYSCALE);
    gray_image.encoding = sensor_msgs::image_encodings::TYPE_8UC1;
    color_image.image = cv::imread(fileName.c_str(), CV_LOAD_IMAGE_COLOR);
    color_image.encoding = sensor_msgs::image_encodings::TYPE_8UC3;

    or_msgs::OrImage or_image_msg;
    or_image_msg.image_gray = *(gray_image.toImageMsg());
    or_image_msg.image_color = *(color_image.toImageMsg());
    or_image_msg.filename = fileName;
    m_OLDebugImagePublisher.publish(or_image_msg);

    // TODO testcode remove this
    //    cv_bridge::CvImagePtr test = cv_bridge::toCvCopy(or_image_msg.image_gray);
    //    imwrite("/home/vseib/Desktop/test_gray.png", test->image);
    //    cv_bridge::CvImagePtr test2 = cv_bridge::toCvCopy(or_image_msg.image_color);
    //    imwrite("/home/vseib/Desktop/test_color.png", test2->image);
}

void THIS::setBackground ( cv_bridge::CvImagePtr gray_image, cv_bridge::CvImagePtr color_image )
{
    // save gray image
    if ( m_BackgroundImageGray ) { delete m_BackgroundImageGray; }
    m_BackgroundImageGray = new cv::Mat(gray_image->image);

    // save color image
    if ( m_BackgroundImageColor ) { delete m_BackgroundImageColor; }
    m_BackgroundImageColor = new cv::Mat(color_image->image);

    // send color image to OLPrimary-GUI
    or_msgs::OrImage or_color_image_msg;
    or_color_image_msg.image_color = *(color_image->toImageMsg());
    m_OLPrimaryImagePublisher.publish(or_color_image_msg);
}

void THIS::setForeground ( cv_bridge::CvImagePtr gray_image, cv_bridge::CvImagePtr color_image )
{
    // save gray image
    if ( m_ForegroundImageGray ) { delete m_ForegroundImageGray; }
    m_ForegroundImageGray = new cv::Mat(gray_image->image);

    // save color image
    if ( m_ForegroundImageColor ) { delete m_ForegroundImageColor; }
    m_ForegroundImageColor = new cv::Mat(color_image->image);
}


void THIS::saveImage ( std::string name )
{
    ImagePropertiesCV* imageProperties = makeImageProperties( name, true );
    if ( imageProperties )
    {
        imageProperties->calculateProperties();
        m_ObjectProperties->addImageProperties( imageProperties );
    }
}


void THIS::displayImage ( int index )
{
    std::vector<ImagePropertiesCV*> pVec = m_ObjectProperties->getImageProperties( );

    if ( index < 0 || index >= (int)pVec.size() )
    {
        ROS_ERROR_STREAM( "Image #" << index << " does not exist!" );
        return;
    }

    const ImagePropertiesCV* imageProperties = pVec[ index ];

    // send color image to OLPrimary-GUI
    cv_bridge::CvImagePtr cv_ptr_color(new cv_bridge::CvImage);
    cv_ptr_color->image = *(imageProperties->getMaskedImageUV()); // TODO instead of UV an rgb image was saved: does this change something?
    cv_ptr_color->encoding = sensor_msgs::image_encodings::TYPE_8UC3;
    or_msgs::OrImage or_image_msg;
    or_image_msg.image_color = *(cv_ptr_color->toImageMsg());

    const std::vector< KeyPoint >* keyPoints = imageProperties->getKeyPoints();

    for ( unsigned kp = 0; kp < keyPoints->size(); kp++ )
    {
        // TODO make creation of ROS messages nicer

        VectorObject2D arrow ( ( *keyPoints ) [kp].getCircle(), 0, 0, 0, 5.0 );
        // convert VectorObject2D to ROS message
        or_msgs::VectorObject2D arrow_msg;
        arrow_msg.r = arrow.r();
        arrow_msg.g = arrow.g();
        arrow_msg.b = arrow.b();
        arrow_msg.line_width = arrow.lineWidth();
        std::vector<Point2D> tmp_vertices = arrow.vertices();
        for(unsigned i = 0; i < tmp_vertices.size(); i++)
        {
            or_msgs::Point2D point;
            point.x = tmp_vertices.at(i).x();
            point.y = tmp_vertices.at(i).y();
            arrow_msg.vertices.push_back(point);
        }
        or_image_msg.vector_objects.push_back( arrow_msg );


        VectorObject2D arrow2 ( ( *keyPoints ) [kp].getCircle(), 1, 1, 1, 3.0 );
        // convert VectorObject2D to ROS message
        or_msgs::VectorObject2D arrow_msg2;
        arrow_msg2.r = arrow2.r();
        arrow_msg2.g = arrow2.g();
        arrow_msg2.b = arrow2.b();
        arrow_msg2.line_width = arrow2.lineWidth();
        tmp_vertices = arrow2.vertices();
        for(unsigned i = 0; i < tmp_vertices.size(); i++)
        {
            or_msgs::Point2D point;
            point.x = tmp_vertices.at(i).x();
            point.y = tmp_vertices.at(i).y();
            arrow_msg2.vertices.push_back(point);
        }
        or_image_msg.vector_objects.push_back( arrow_msg2 );
    }

    Point2D center = imageProperties->getCenter();

    int size = imageProperties->getImageMask()->getWidth() / 20;

    ROS_INFO_STREAM( center.x() << " " << center.y() );

    std::vector<Point2D> vLine;
    vLine.push_back( center-Point2D( 0, size ) );
    vLine.push_back( center+Point2D( 0, size ) );
    or_msgs::VectorObject2D v_line_msg;
    for(unsigned i = 0; i < vLine.size(); i++)
    {
        or_msgs::Point2D point;
        point.x = vLine.at(i).x();
        point.y = vLine.at(i).y();
        v_line_msg.vertices.push_back(point);
    }
    v_line_msg.r = 1;
    v_line_msg.g = 1;
    v_line_msg.b = 1;
    v_line_msg.line_width = 3.0;
    or_image_msg.vector_objects.push_back(v_line_msg);
    v_line_msg.r = 0;
    v_line_msg.g = 0;
    v_line_msg.b = 1;
    v_line_msg.line_width = 1.0;
    or_image_msg.vector_objects.push_back(v_line_msg);


    std::vector<Point2D> hLine;
    hLine.push_back( center-Point2D( size, 0 ) );
    hLine.push_back( center+Point2D( size, 0 ) );
    or_msgs::VectorObject2D h_line_msg;
    for(unsigned i = 0; i < hLine.size(); i++)
    {
        or_msgs::Point2D point;
        point.x = hLine.at(i).x();
        point.y = hLine.at(i).y();
        h_line_msg.vertices.push_back(point);
    }
    h_line_msg.r = 1;
    h_line_msg.g = 1;
    h_line_msg.b = 1;
    h_line_msg.line_width = 3.0;
    or_image_msg.vector_objects.push_back(h_line_msg);
    h_line_msg.r = 0;
    h_line_msg.g = 0;
    h_line_msg.b = 1;
    h_line_msg.line_width = 1.0;
    or_image_msg.vector_objects.push_back(h_line_msg);


    VectorObject2D border ( *(imageProperties->getOutline()), 1, 0, 0, 1.0 );
    // convert VectorObject2D to ROS message
    or_msgs::VectorObject2D vector_msg;
    vector_msg.r = border.r();
    vector_msg.g = border.g();
    vector_msg.b = border.b();
    vector_msg.line_width = border.lineWidth();

    std::vector<Point2D> tmp_vertices = border.vertices();
    for(unsigned i = 0; i < tmp_vertices.size(); i++)
    {
        or_msgs::Point2D point;
        point.x = tmp_vertices.at(i).x();
        point.y = tmp_vertices.at(i).y();
        vector_msg.vertices.push_back(point);
    }
    or_image_msg.vector_objects.push_back(vector_msg);

    m_OLPrimaryImagePublisher.publish(or_image_msg);
}
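
// The nearly identical VectorObject2D-to-message conversions above (and the
// one in previewIsolatedImage() below) could be collapsed into one helper, as
// the TODO note in displayImage() suggests. The function below is a
// hypothetical sketch, not part of the original module; it only uses the
// accessors already called above.
static or_msgs::VectorObject2D toVectorObject2DMsg( const VectorObject2D& obj )
{
    or_msgs::VectorObject2D msg;
    msg.r = obj.r();
    msg.g = obj.g();
    msg.b = obj.b();
    msg.line_width = obj.lineWidth();
    std::vector<Point2D> vertices = obj.vertices();
    for ( unsigned i = 0; i < vertices.size(); i++ )
    {
        or_msgs::Point2D point;
        point.x = vertices.at(i).x();
        point.y = vertices.at(i).y();
        msg.vertices.push_back( point );
    }
    return msg;
}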

void THIS::deleteImage ( int index )
{
    m_ObjectProperties->deleteImageProperties( index );
}

ImagePropertiesCV* THIS::makeImageProperties( std::string name, bool crop )
{
    if ( !m_BackgroundImageGray || !m_BackgroundImageColor )
    {
        ROS_ERROR_STREAM ( "Background image missing!" );
        return 0;
    }
    if ( !m_ForegroundImageGray || !m_ForegroundImageColor )
    {
        ROS_ERROR_STREAM ( "Foreground image missing!" );
        return 0;
    }

    ImageMaskCV mask( *m_ForegroundImageGray, *m_ForegroundImageColor, *m_BackgroundImageGray, *m_BackgroundImageColor, m_DifferenceThreshold );

    mask.erode ( 1.0 );
    mask.dilate ( m_OpenRadius );
    mask.erode ( m_OpenRadius - 1.0 );

    // delete all mask segments but the largest
    if ( m_IsolateLargestSegment )
    {
        ConnectedComponentAnalyzer::isolateLargestSegment ( mask.getData(), mask.getWidth(), mask.getHeight() );
    }

    mask.dilate ( m_BorderSize );

    if ( crop )
    {
        // crop image & mask

        Box2D<int> area = mask.getBoundingBox();

        float borderSize = Config::getFloat( "ObjectRecognition.fObjectImageBorder" );
        int border = ( area.width()+area.height() ) / 2 * borderSize;
        area.expand( border+2 );

        area.clip( Box2D<int>( 0, 0, m_ForegroundImageGray->cols, m_ForegroundImageGray->rows ) );

        int newWidth = area.width();
        int newHeight= area.height();
        int minX = area.minX();
        int minY = area.minY();

        ImageMaskCV *croppedMask = mask.subMask( area );

        cv::Mat* croppedGrayImage = new cv::Mat(newHeight, newWidth, CV_8UC1);
        for ( int y=0; y<newHeight; y++ )
        {
            for ( int x=0; x<newWidth; x++ )
            {
                croppedGrayImage->at<unsigned char>(y,x) = m_ForegroundImageGray->at<unsigned char>(y+minY, x+minX);
            }
        }

        // note: the color crop must read from the color image, not the gray one
        cv::Mat* croppedColorImage = new cv::Mat(newHeight, newWidth, CV_8UC3);
        for ( int y=0; y<newHeight; y++ )
        {
            for ( int x=0; x<newWidth; x++ )
            {
                croppedColorImage->at<cv::Vec3b>(y,x) = m_ForegroundImageColor->at<cv::Vec3b>(y+minY, x+minX);
            }
        }
        ImagePropertiesCV* imageProperties = new ImagePropertiesCV( name, croppedGrayImage, croppedColorImage, croppedMask );
        return imageProperties;
    }
    else
    {
        ImagePropertiesCV* imageProperties = new ImagePropertiesCV( name, new cv::Mat( *m_ForegroundImageGray ),
                                                                    new cv::Mat( *m_ForegroundImageColor ), new ImageMaskCV( mask ) );
        return imageProperties;
    }

}
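
// A minimal sketch of how the two per-pixel copy loops in makeImageProperties()
// could be replaced by an OpenCV region-of-interest copy. cropRegion() is a
// hypothetical helper, not part of the original module; it assumes Box2D<int>
// exposes minX()/minY()/width()/height() as used above.
static cv::Mat* cropRegion( const cv::Mat& source, const Box2D<int>& area )
{
    cv::Rect roi( area.minX(), area.minY(), area.width(), area.height() );
    // clone() detaches the crop from the source image's pixel buffer
    return new cv::Mat( source( roi ).clone() );
}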

void THIS::previewIsolatedImage()
{
    ImagePropertiesCV* imageProperties = makeImageProperties( "", false );

    if ( imageProperties )
    {
        // send color image to OLPrimary-GUI
        cv_bridge::CvImagePtr cv_ptr_color(new cv_bridge::CvImage);
        cv_ptr_color->image = *(imageProperties->getMaskedImageUV());
        cv_ptr_color->encoding = sensor_msgs::image_encodings::TYPE_8UC3;
        or_msgs::OrImage or_image_msg;
        or_image_msg.image_color = *(cv_ptr_color->toImageMsg());

        imageProperties->traceOutline();
        VectorObject2D border ( *(imageProperties->getOutline()), 1, 0, 0, 1.0 );

        // convert VectorObject2D to ROS message
        or_msgs::VectorObject2D vector_msg;
        vector_msg.r = border.r();
        vector_msg.g = border.g();
        vector_msg.b = border.b();
        vector_msg.line_width = border.lineWidth();

        std::vector<Point2D> tmp_vertices = border.vertices();
        for(unsigned i = 0; i < tmp_vertices.size(); i++)
        {
            or_msgs::Point2D point;
            point.x = tmp_vertices.at(i).x();
            point.y = tmp_vertices.at(i).y();
            vector_msg.vertices.push_back(point);
        }

        or_image_msg.vector_objects.push_back(vector_msg);
        m_OLPrimaryImagePublisher.publish(or_image_msg);
    }

    delete imageProperties;
}

void THIS::saveObject ( std::string objectName )
{
    m_ObjectProperties->setName( objectName );
    m_ObjectProperties->setType( m_ObjectType );

    std::string path = ros::package::getPath("or_nodes");
    std::string filename = path + m_PathForSaving + objectName + ".objprop";

    // write objectProperties file
    std::ofstream out ( filename.c_str() );
    boost::archive::text_oarchive oa(out);
    oa << m_ObjectProperties;

    delete m_ObjectProperties;
    m_ObjectProperties = new ObjectProperties();

    ROS_INFO_STREAM ( "Object saved to " << filename );
}


void THIS::loadObject ( std::string filename )
{
    delete m_ObjectProperties;

    //std::string filename = m_PathForSaving + objectName + ".objprop";

    ROS_INFO_STREAM( "Loading " + filename );
    std::ifstream ifs( filename.c_str() );
    boost::archive::text_iarchive ia(ifs);
    m_ObjectProperties = new ObjectProperties();
    ia >> m_ObjectProperties;
    ifs.close();

    m_ObjectType = m_ObjectProperties->getType();
    ROS_INFO_STREAM( m_ObjectType );
}

#undef THIS
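
// A minimal sketch (not part of this file) of how the module might be wired
// into a ROS node; the node name and the camera topic are assumptions, and the
// actual node setup of the or_nodes package may differ.
//
//   #include <ros/ros.h>
//   #include "ORLearningModule.h"
//
//   int main( int argc, char** argv )
//   {
//       ros::init( argc, argv, "or_learning_node" );
//       ros::NodeHandle nh;
//       // pass the node handle and the image topic the module should subscribe to
//       ORLearningModule learningModule( &nh, "/camera/rgb/image_color" );
//       ros::spin();
//       return 0;
//   }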

