ORLearningModule.cpp
/*******************************************************************************
*  ORLearningModule.cpp
*
*  (C) 2005, 2013 AG Aktives Sehen <agas@uni-koblenz.de>
*                 Universitaet Koblenz-Landau
*
******************************************************************************/

#include "ORLearningModule.h"

#include <sstream>
#include <fstream>

#include <ros/package.h>
#include <sensor_msgs/image_encodings.h>
#include <or_msgs/OrLearningStatus.h>
#include <or_msgs/VectorObject2D.h>
#include <or_msgs/Point2D.h>

#include <opencv2/highgui/highgui.hpp>

#include "Architecture/Config/Config.h"

#include <boost/archive/binary_iarchive.hpp>
#include <boost/archive/binary_oarchive.hpp>
#include <boost/archive/text_iarchive.hpp>
#include <boost/archive/text_oarchive.hpp>

//#include "Workers/ImageHelpers/ImageMaskCV.h"
#include "or_libs/src/KeyPointExtraction/ImageMaskCV.h"
#include "Workers/VectorGraphics/VectorObject2D.h"

#include "ObjectRecognition/ObjectProperties.h"
#include "ObjectRecognition/ImagePropertiesCV.h"

#include "ConnectedComponentAnalyzer/ConnectedComponentAnalyzer.h"


#define THIS ORLearningModule

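// Reads object recognition and histogram parameters from the config, sets up
// the learning state machine and registers all ROS subscribers and publishers.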
THIS::THIS(ros::NodeHandle *nh, std::string inputTopic)
{
    m_PathForSaving = Config::getString ( "ObjectRecognition.sDataPath" );

    // histogram parameters
    m_HistogramBinSize = Config::getInt ( "ObjectRecognition.Histogram.iBinSize" );

    m_HistogramMinY = Config::getInt ( "ObjectRecognition.Histogram.iMinY" );
    m_HistogramMaxY = Config::getInt ( "ObjectRecognition.Histogram.iMaxY" );

    m_HistogramClearRange = Config::getFloat ( "ObjectRecognition.Histogram.fHistogramClearRange" );

    m_OpenRadius = 15.0;
    m_BorderSize = 5.0;

    m_BackgroundImageGray = 0;
    m_BackgroundImageColor = 0;
    m_ForegroundImageGray = 0;
    m_ForegroundImageColor = 0;
    m_DifferenceThreshold = 25; // TODO should be zero and then initialized from the GUI
    m_IsolateLargestSegment = false;

    ADD_MACHINE_STATE ( m_ModuleMachine, IDLE );
    ADD_MACHINE_STATE ( m_ModuleMachine, WAITING_FOR_FOREGROUND );
    ADD_MACHINE_STATE ( m_ModuleMachine, WAITING_FOR_BACKGROUND );
    m_ModuleMachine.setName ( "ObjectLearning State" );

    m_ObjectProperties = new ObjectProperties( );

    m_ImageRequested = false;

    // subscribe to topics
    m_ORLearnCommandSubscriber = nh->subscribe<or_msgs::OrLearnCommand>("/or/learn_commands", 10, &ORLearningModule::callbackOrLearnCommand, this);

    // subscribe to image input topic
    m_ImageSubscriber = nh->subscribe<sensor_msgs::Image>(inputTopic, 10, &ORLearningModule::callbackImage, this);

    // advertise topics
    m_ORLearningStatusPublisher = nh->advertise<or_msgs::OrLearningStatus>("/or/learning_status", 10);
    m_OLPrimaryImagePublisher = nh->advertise<or_msgs::OrImage>("/or/obj_learn_primary", 10);
    m_OLPrimaryImagePublisherColor = nh->advertise<sensor_msgs::Image>("/or/obj_learn_primary_color", 10);
    m_OLDebugImagePublisher = nh->advertise<or_msgs::OrImage>("/or/debug_image", 10);
    m_DebugImagePublisherGray = nh->advertise<sensor_msgs::Image>("/or/debug_image_gray", 10);
    m_DebugImagePublisherColor = nh->advertise<sensor_msgs::Image>("/or/debug_image_color", 10);
}


THIS::~THIS()
{
    delete m_BackgroundImageGray;
    delete m_BackgroundImageColor;
    delete m_ForegroundImageGray;
    delete m_ForegroundImageColor;
    delete m_ObjectProperties;
}

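// Dispatches incoming learning commands: segmentation settings, grabbing or
// loading background/foreground images, displaying, saving and deleting
// training images, and saving or loading complete objects. Publishes an
// updated learning status after operations that change the image list.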
void THIS::callbackOrLearnCommand( const or_msgs::OrLearnCommand::ConstPtr& message )
{
    try
    {
        std::ostringstream s;
        switch ( message->command )
        {
        case ORLearningModule::SetDifferenceThreshold:
            m_DifferenceThreshold = message->int_value;
            ROS_INFO_STREAM ( "Setting difference threshold to " << m_DifferenceThreshold );
            if ( m_BackgroundImageGray && m_ForegroundImageGray )
            {
                previewIsolatedImage();
            }
            break;

        case ORLearningModule::SetOpenRadius:
            m_OpenRadius = message->int_value;
            ROS_INFO_STREAM ( "Setting opening radius to " << m_OpenRadius );
            if ( m_BackgroundImageGray && m_ForegroundImageGray )
            {
                previewIsolatedImage();
            }
            break;

        case ORLearningModule::SetBorderSize:
            m_BorderSize = message->int_value;
            ROS_INFO_STREAM ( "Setting border size to " << m_BorderSize );
            if ( m_BackgroundImageGray && m_ForegroundImageGray )
            {
                previewIsolatedImage();
            }
            break;

        case ORLearningModule::SetIsolateLargestSegment:
            if ( message->string_value == "true" )
            {
                ROS_INFO_STREAM ( "Using single largest segment" );
                m_IsolateLargestSegment = true;
            }
            else
            {
                ROS_INFO_STREAM ( "Using all segments" );
                m_IsolateLargestSegment = false;
            }
            if ( m_BackgroundImageGray && m_ForegroundImageGray )
            {
                previewIsolatedImage();
            }
            break;

        case ORLearningModule::GrabBackgroundImage:
        {
            m_ModuleMachine.setState ( WAITING_FOR_BACKGROUND );
            ROS_INFO_STREAM ( "Grabbing background image" );
            m_ImageRequested = true;
            break;
        }
        case ORLearningModule::GrabForegroundImage:
        {
            m_ModuleMachine.setState ( WAITING_FOR_FOREGROUND );
            ROS_INFO_STREAM ( "Grabbing foreground image" );
            m_ImageRequested = true;
            break;
        }
        case ORLearningModule::LoadBackgroundImage:
        {
            m_ModuleMachine.setState ( WAITING_FOR_BACKGROUND );
            std::string fileName = message->string_value;
            ROS_INFO_STREAM("Loading background image: " << fileName);
            // read and publish image
            loadImage(fileName);
            break;
        }

        case ORLearningModule::LoadForegroundImage:
        {
            m_ModuleMachine.setState ( WAITING_FOR_FOREGROUND );
            std::string fileName = message->string_value;
            ROS_INFO_STREAM("Loading foreground image: " << fileName);
            // read and publish image
            loadImage(fileName);
            break;
        }
        case ORLearningModule::DisplayImage:
            ROS_INFO_STREAM ( "Displaying Image #" << message->int_value );
            displayImage ( message->int_value );
            break;

        case ORLearningModule::SaveImage:
        {
            ROS_INFO_STREAM ( "Saving Image '" << message->string_value << "'" );
            saveImage( message->string_value );
            // publish learning status message
            or_msgs::OrLearningStatus learn_state_msg;
            learn_state_msg.image_names = m_ObjectProperties->getImageNames();
            learn_state_msg.object_type = m_ObjectType;
            m_ORLearningStatusPublisher.publish(learn_state_msg);
            break;
        }
        case ORLearningModule::DeleteImage:
        {
            ROS_INFO_STREAM ( "Deleting image #" << message->int_value );
            deleteImage ( message->int_value );
            // publish learning status message
            or_msgs::OrLearningStatus learn_state_msg;
            learn_state_msg.image_names = m_ObjectProperties->getImageNames();
            learn_state_msg.object_type = m_ObjectType;
            m_ORLearningStatusPublisher.publish(learn_state_msg);
            break;
        }
        case ORLearningModule::SetObjectType:
            ROS_INFO_STREAM ( "Setting object type to '" << message->string_value << "'" );
            m_ObjectType = message->string_value;
            break;

        case ORLearningModule::SaveObject:
        {
            ROS_INFO_STREAM ( "Saving object as '" << message->string_value << "'" );
            saveObject ( message->string_value );
            // publish learning status message
            or_msgs::OrLearningStatus learn_state_msg;
            learn_state_msg.image_names = m_ObjectProperties->getImageNames();
            learn_state_msg.object_type = m_ObjectType;
            m_ORLearningStatusPublisher.publish(learn_state_msg);
            break;
        }
        case ORLearningModule::LoadObject:
        {
            ROS_INFO_STREAM ( "Loading object '" << message->string_value << "'" );
            loadObject ( message->string_value );
            // publish learning status message
            or_msgs::OrLearningStatus learn_state_msg;
            learn_state_msg.image_names = m_ObjectProperties->getImageNames();
            learn_state_msg.object_type = m_ObjectType;
            m_ORLearningStatusPublisher.publish(learn_state_msg);
            break;
        }
        } // closes switch
        if ( s.str() != "" )
        {
            ROS_INFO_STREAM ( s.str() );
        }
    }
    catch ( const char* exception_msg )
    {
        std::ostringstream stream;
        stream << "Caught exception: " << std::endl << exception_msg;
        ROS_ERROR_STREAM ( stream.str() );
        throw exception_msg;
    }
}

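// Forwards an incoming camera image to processImageMessage(), but only if an
// image was requested by a preceding Grab*Image command.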
void THIS::callbackImage( const sensor_msgs::Image::ConstPtr& message )
{
    if(m_ImageRequested)
    {
        m_ImageRequested = false;
        processImageMessage(message);
    }
}

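// Converts the image message to OpenCV (color and grayscale copies) and stores
// it as background or foreground depending on the current state; a new
// foreground image immediately triggers a segmentation preview.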
void THIS::processImageMessage( const sensor_msgs::Image::ConstPtr& message )
{
    try
    {
        cv_bridge::CvImagePtr color_img_ptr;
        color_img_ptr = cv_bridge::toCvCopy(message);
        cv_bridge::CvImagePtr gray_img_ptr;
        gray_img_ptr = cv_bridge::toCvCopy(message, "mono8");


//        try
//        {
//            cvtColor( color_img_ptr->image, gray_img_ptr->image, CV_BGR2GRAY,1);
//            cv::namedWindow("Object - gray", CV_WINDOW_AUTOSIZE);
//            cv::imshow( "Object - gray", gray_img_ptr->image );
//            cv::namedWindow("Object", CV_WINDOW_AUTOSIZE);
//            cv::imshow( "Object", color_img_ptr->image );
//            cv::waitKey(0);
//        }
//        catch (cv_bridge::Exception error)
//        {
//            ROS_ERROR("Error converting Image message to OpenCV image.");
//        }

        switch ( m_ModuleMachine.state() )
        {
        case WAITING_FOR_BACKGROUND:
            setBackground ( gray_img_ptr, color_img_ptr );
            m_ModuleMachine.setState ( IDLE );
            break;
        case WAITING_FOR_FOREGROUND:
            setForeground ( gray_img_ptr, color_img_ptr );
            previewIsolatedImage();
            m_ModuleMachine.setState ( IDLE );
            break;
        default: break;
        }
    }
    catch ( const char* exception_msg )
    {
        std::ostringstream stream;
        stream << "Caught exception: " << std::endl << exception_msg;
        ROS_ERROR_STREAM ( stream.str() );
        throw exception_msg;
    }
}


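// Loads an image from disk as both grayscale and color and publishes it on the
// debug image topics.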
void THIS::loadImage(std::string fileName)
{
    cv_bridge::CvImage gray_image, color_image;
    gray_image.image = cv::imread(fileName.c_str(), CV_LOAD_IMAGE_GRAYSCALE);
    gray_image.encoding = sensor_msgs::image_encodings::TYPE_8UC1;
    color_image.image = cv::imread(fileName.c_str(), CV_LOAD_IMAGE_COLOR);
    color_image.encoding = sensor_msgs::image_encodings::TYPE_8UC3;

    or_msgs::OrImage or_image_msg;
    or_image_msg.image_gray = *(gray_image.toImageMsg());
    or_image_msg.image_color = *(color_image.toImageMsg());
    or_image_msg.filename = fileName;
    m_OLDebugImagePublisher.publish(or_image_msg);

    // TODO here we have duplicate publishing
    sensor_msgs::Image image_msg;
    image_msg = *(gray_image.toImageMsg());
    m_DebugImagePublisherGray.publish(image_msg);
    image_msg = *(color_image.toImageMsg());
    m_DebugImagePublisherColor.publish(image_msg);


    // TODO test code, remove this
    //    cv_bridge::CvImagePtr test = cv_bridge::toCvCopy(or_image_msg.image_gray);
    //    imwrite("/home/vseib/Desktop/test_gray.png", test->image);
    //    cv_bridge::CvImagePtr test2 = cv_bridge::toCvCopy(or_image_msg.image_color);
    //    imwrite("/home/vseib/Desktop/test_color.png", test2->image);
}

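// Stores the grabbed background image (gray and color) and publishes it to the
// learning GUI and the debug topics.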
void THIS::setBackground ( cv_bridge::CvImagePtr gray_image, cv_bridge::CvImagePtr color_image )
{
    // save gray image
    if ( m_BackgroundImageGray ) { delete m_BackgroundImageGray; }
    m_BackgroundImageGray = new cv::Mat(gray_image->image);

    // save color image
    if ( m_BackgroundImageColor ) { delete m_BackgroundImageColor; }
    m_BackgroundImageColor = new cv::Mat(color_image->image);

//    cv::namedWindow("backgray", CV_WINDOW_AUTOSIZE);
//    cv::imshow( "backgray", *m_BackgroundImageGray );
//    cv::waitKey(0);

    // send color image to OLPrimary-GUI
    or_msgs::OrImage or_color_image_msg;
    or_color_image_msg.image_color = *(color_image->toImageMsg());
    m_OLPrimaryImagePublisher.publish(or_color_image_msg);

    // TODO get rid of double publishing
    sensor_msgs::Image image_msg_color;
    image_msg_color = *(color_image->toImageMsg());
    m_OLPrimaryImagePublisherColor.publish(image_msg_color);


    or_msgs::OrImage or_image_msg;
    or_image_msg.image_gray = *(gray_image->toImageMsg());
    or_image_msg.image_color = *(color_image->toImageMsg());
    or_image_msg.filename = "";
    m_OLDebugImagePublisher.publish(or_image_msg);

    // TODO here we have duplicate publishing
    sensor_msgs::Image image_msg2;
    image_msg2 = *(color_image->toImageMsg());
    m_DebugImagePublisherColor.publish(image_msg2);
    sensor_msgs::Image image_msg;
    image_msg = *(gray_image->toImageMsg());
    m_DebugImagePublisherGray.publish(image_msg);
}

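// Stores the grabbed foreground image (gray and color) for later segmentation
// against the background.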
void THIS::setForeground ( cv_bridge::CvImagePtr gray_image, cv_bridge::CvImagePtr color_image )
{
    // save gray image
    if ( m_ForegroundImageGray ) { delete m_ForegroundImageGray; }
    m_ForegroundImageGray = new cv::Mat(gray_image->image);

    // save color image
    if ( m_ForegroundImageColor ) { delete m_ForegroundImageColor; }
    m_ForegroundImageColor = new cv::Mat(color_image->image);
}


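// Segments the current foreground against the background, computes the image
// properties and adds them to the object currently being learned.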
void THIS::saveImage ( std::string name )
{
    ImagePropertiesCV* imageProperties = makeImageProperties( name, true );
    if ( imageProperties )
    {
        imageProperties->calculateProperties();
        m_ObjectProperties->addImageProperties( imageProperties );
    }
}


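// Publishes the masked image of the training image with the given index,
// together with vector overlays for its keypoints, a cross at the object
// center and the traced outline.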
void THIS::displayImage ( int index )
{
    std::vector<ImagePropertiesCV*> pVec = m_ObjectProperties->getImageProperties( );

    if ( index < 0 || index >= (int)pVec.size() )
    {
        ROS_ERROR_STREAM( "Image #" << index << " does not exist!" );
        return;
    }

    const ImagePropertiesCV* imageProperties = pVec[ index ];

    // send color image to OLPrimary-GUI
    cv::Mat image = *imageProperties->getMaskedImageUV(); // TODO an RGB image was saved instead of UV: does this change anything?
    cv_bridge::CvImagePtr cv_ptr_color(new cv_bridge::CvImage(std_msgs::Header(), "bgr8", image));
    or_msgs::OrImage or_image_msg;
    or_image_msg.image_color = *(cv_ptr_color->toImageMsg());

    const std::vector< KeyPoint >* keyPoints = imageProperties->getKeyPoints();

    for ( unsigned kp = 0; kp < keyPoints->size(); kp++ )
    {
        // TODO make creation of ROS messages nicer

        VectorObject2D arrow ( ( *keyPoints ) [kp].getCircle(), 0, 0, 0, 5.0 );
        // convert VectorObject2D to ROS message
        or_msgs::VectorObject2D arrow_msg;
        arrow_msg.r = arrow.r();
        arrow_msg.g = arrow.g();
        arrow_msg.b = arrow.b();
        arrow_msg.line_width = arrow.lineWidth();
        std::vector<Point2D> tmp_vertices = arrow.vertices();
        for(unsigned i = 0; i < tmp_vertices.size(); i++)
        {
            or_msgs::Point2D point;
            point.x = tmp_vertices.at(i).x();
            point.y = tmp_vertices.at(i).y();
            arrow_msg.vertices.push_back(point);
        }
        or_image_msg.vector_objects.push_back( arrow_msg );


        VectorObject2D arrow2 ( ( *keyPoints ) [kp].getCircle(), 1, 1, 1, 3.0 );
        // convert VectorObject2D to ROS message
        or_msgs::VectorObject2D arrow_msg2;
        arrow_msg2.r = arrow2.r();
        arrow_msg2.g = arrow2.g();
        arrow_msg2.b = arrow2.b();
        arrow_msg2.line_width = arrow2.lineWidth();
        tmp_vertices = arrow2.vertices();
        for(unsigned i = 0; i < tmp_vertices.size(); i++)
        {
            or_msgs::Point2D point;
            point.x = tmp_vertices.at(i).x();
            point.y = tmp_vertices.at(i).y();
            arrow_msg2.vertices.push_back(point);
        }
        or_image_msg.vector_objects.push_back( arrow_msg2 );
    }

    Point2D center = imageProperties->getCenter();

    int size = imageProperties->getImageMask()->getWidth() / 20;

    ROS_INFO_STREAM( "Center: " << center.x() << " " << center.y() );

    std::vector<Point2D> vLine;
    vLine.push_back( center-Point2D( 0, size ) );
    vLine.push_back( center+Point2D( 0, size ) );
    or_msgs::VectorObject2D v_line_msg;
    for(unsigned i = 0; i < vLine.size(); i++)
    {
        or_msgs::Point2D point;
        point.x = vLine.at(i).x();
        point.y = vLine.at(i).y();
        v_line_msg.vertices.push_back(point);
    }
    v_line_msg.r = 1;
    v_line_msg.g = 1;
    v_line_msg.b = 1;
    v_line_msg.line_width = 3.0;
    or_image_msg.vector_objects.push_back(v_line_msg);
    v_line_msg.r = 0;
    v_line_msg.g = 0;
    v_line_msg.b = 1;
    v_line_msg.line_width = 1.0;
    or_image_msg.vector_objects.push_back(v_line_msg);


    std::vector<Point2D> hLine;
    hLine.push_back( center-Point2D( size, 0 ) );
    hLine.push_back( center+Point2D( size, 0 ) );
    or_msgs::VectorObject2D h_line_msg;
    for(unsigned i = 0; i < hLine.size(); i++)
    {
        or_msgs::Point2D point;
        point.x = hLine.at(i).x();
        point.y = hLine.at(i).y();
        h_line_msg.vertices.push_back(point);
    }
    h_line_msg.r = 1;
    h_line_msg.g = 1;
    h_line_msg.b = 1;
    h_line_msg.line_width = 3.0;
    or_image_msg.vector_objects.push_back(h_line_msg);
    h_line_msg.r = 0;
    h_line_msg.g = 0;
    h_line_msg.b = 1;
    h_line_msg.line_width = 1.0;
    or_image_msg.vector_objects.push_back(h_line_msg);


    VectorObject2D border ( *(imageProperties->getOutline()), 1, 0, 0, 1.0 );
    // convert VectorObject2D to ROS message
    or_msgs::VectorObject2D vector_msg;
    vector_msg.r = border.r();
    vector_msg.g = border.g();
    vector_msg.b = border.b();
    vector_msg.line_width = border.lineWidth();

    std::vector<Point2D> tmp_vertices = border.vertices();
    for(unsigned i = 0; i < tmp_vertices.size(); i++)
    {
        or_msgs::Point2D point;
        point.x = tmp_vertices.at(i).x();
        point.y = tmp_vertices.at(i).y();
        vector_msg.vertices.push_back(point);
    }
    or_image_msg.vector_objects.push_back(vector_msg);

    m_OLPrimaryImagePublisher.publish(or_image_msg);

    // TODO get rid of double publishing
    sensor_msgs::Image image_msg;
    image_msg = *(cv_ptr_color->toImageMsg());
    m_OLPrimaryImagePublisherColor.publish(image_msg);
}

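// Removes the training image with the given index from the current object.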
void THIS::deleteImage ( int index )
{
    m_ObjectProperties->deleteImageProperties( index );
}

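// Core segmentation step: builds a difference mask between the stored
// foreground and background images, cleans it up with a morphological opening,
// optionally keeps only the largest connected segment, dilates it by the
// border size and, if crop is set, crops image and mask to the bounding box of
// the object. Returns a newly allocated ImagePropertiesCV (ownership passes to
// the caller) or 0 if a background or foreground image is missing.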
ImagePropertiesCV* THIS::makeImageProperties( std::string name, bool crop )
{
    if ( !m_BackgroundImageGray || !m_BackgroundImageColor )
    {
        ROS_ERROR_STREAM ( "Background image missing!" );
        return 0;
    }
    if ( !m_ForegroundImageGray || !m_ForegroundImageColor )
    {
        ROS_ERROR_STREAM ( "Foreground image missing!" );
        return 0;
    }

    ROS_WARN_STREAM("m_DifferenceThreshold: "<<m_DifferenceThreshold);



    ImageMaskCV mask(*m_ForegroundImageGray, *m_ForegroundImageColor,
                     *m_BackgroundImageGray, *m_BackgroundImageColor, m_DifferenceThreshold);


    // TODO enable this with debug flag and replace the following if clause
//    int width = m_ForegroundImageGray->cols; //640
//    int height = m_ForegroundImageGray->rows;//480;
//    cv::Mat maskImage = cv::Mat::zeros(height,width,CV_8UC1);
//    maskImage.data = mask.getData();
//    cv::namedWindow("Mask Image New", CV_WINDOW_AUTOSIZE);
//    cv::imshow("Mask Image New",maskImage);

//    // Debug output if needed:
//    if(ros::param::has("/OrNodes/debugOutput") && ros::param::has("/OrNodes/debugOutput"))
//    {
//        std::string debugOutput;
//        ros::param::get("/OrNodes/debugOutput", debugOutput);
//        std::string strTrue = "/true";

//        if(debugOutput==strTrue)
//        {
//            // write the output image to the hard drive
//            if(ros::param::has("/OrNodes/debugOutputPath") && ros::param::has("/OrNodes/debugOutputPath"))
//            {
//                std::string debugOutputPath;
//                ros::param::get("/OrNodes/debugOutputPath", debugOutputPath);
//                std::string strDefault = "/default";
//                if(debugOutputPath==strDefault)
//                {
//                    debugOutputPath = ros::package::getPath("or_nodes");
//                }
//                std::stringstream imageOutputFileName;
//                imageOutputFileName<<debugOutputPath<<"/debug_learning_mask.jpg";
//                ROS_ERROR_STREAM("write Output Image to" + imageOutputFileName.str());
//                cv::imwrite(imageOutputFileName.str().c_str(),maskImage);

//                cv::namedWindow("Mask Image", CV_WINDOW_AUTOSIZE);
//                cv::imshow("Mask Image",maskImage);
//                cv::waitKey(0);
//            }
//        }
//    }

    mask.erode ( 1.0 );
    mask.dilate ( m_OpenRadius );
    mask.erode ( m_OpenRadius - 1.0 );

    // delete all mask segments but the largest
    if ( m_IsolateLargestSegment )
    {
        ConnectedComponentAnalyzer::isolateLargestSegment ( mask.getData(), mask.getWidth(), mask.getHeight() );
    }

    mask.dilate ( m_BorderSize );

    if ( crop )
    {
        // crop image & mask
        Box2D<int> area = mask.getBoundingBox();

        float borderSize = Config::getFloat( "ObjectRecognition.fObjectImageBorder" );
        int border = ( area.width()+area.height() ) / 2 * borderSize;
        area.expand( border+2 );

        area.clip( Box2D<int>( 0, 0, m_ForegroundImageGray->cols, m_ForegroundImageGray->rows ) );

        int newWidth = area.width();
        int newHeight= area.height();
        int minX = area.minX();
        int minY = area.minY();

        ImageMaskCV *croppedMask = mask.subMask( area );

        cv::Mat* croppedGrayImage = new cv::Mat(newHeight, newWidth, CV_8UC1);
        for ( int y=0; y<newHeight; y++ )
        {
            for ( int x=0; x<newWidth; x++ )
            {
                croppedGrayImage->at<unsigned char>(y,x) = m_ForegroundImageGray->at<unsigned char>(y+minY, x+minX);
            }
        }

        cv::Mat* croppedColorImage = new cv::Mat(newHeight, newWidth, CV_8UC3);
        for ( int y=0; y<newHeight; y++ )
        {
            for ( int x=0; x<newWidth; x++ )
            {
                croppedColorImage->at<cv::Vec3b>(y,x) = m_ForegroundImageColor->at<cv::Vec3b>(y+minY, x+minX);
            }
        }
        ImagePropertiesCV* imageProperties = new ImagePropertiesCV( name, croppedGrayImage, croppedColorImage, croppedMask );

        cv::Mat maskImage = cv::Mat(newHeight, newWidth, CV_8UC1);
        maskImage.data = imageProperties->getImageMask()->getData();
        cv::Mat* maskImage2 = imageProperties->getImageUV();
        cv::Mat* maskImage3 = imageProperties->getMaskedImageUV();

        // TODO enable with debug flag
//        cv::namedWindow("props mask crop", CV_WINDOW_AUTOSIZE);
//        cv::imshow("props mask crop",maskImage); // TODO garbage data
//        cv::namedWindow("props image uv crop", CV_WINDOW_AUTOSIZE);
//        cv::imshow("props image uv crop",*maskImage2); // TODO squashed, right and left halves swapped
//        cv::namedWindow("props masked image uv crop", CV_WINDOW_AUTOSIZE);
//        cv::imshow("props masked image uv crop",*maskImage3); // TODO cropped region is the mask, but content is the gray image
//        cv::waitKey(0);

        cv_bridge::CvImagePtr gray_img_ptr(new cv_bridge::CvImage(std_msgs::Header(), "mono8", maskImage));
        sensor_msgs::Image image_msg_mask;
        image_msg_mask = *(gray_img_ptr->toImageMsg());
        m_DebugImagePublisherGray.publish(image_msg_mask);

        cv_bridge::CvImagePtr color_img_ptr(new cv_bridge::CvImage(std_msgs::Header(), "bgr8", *maskImage3));
        sensor_msgs::Image image_msg_masked_image;
        image_msg_masked_image = *(color_img_ptr->toImageMsg());
        m_DebugImagePublisherColor.publish(image_msg_masked_image);


        return imageProperties;
    }
    else
    {
        ImagePropertiesCV* imageProperties = new ImagePropertiesCV( name, new cv::Mat( *m_ForegroundImageGray ),
                                     new cv::Mat( *m_ForegroundImageColor ), new ImageMaskCV( mask ) );
        cv::Mat maskImage = cv::Mat::zeros(m_ForegroundImageGray->rows, m_ForegroundImageGray->cols, CV_8UC1);
        maskImage.data = imageProperties->getImageMask()->getData();
        cv::Mat* maskImage2 = imageProperties->getImageUV();
        cv::Mat* maskImage3 = imageProperties->getMaskedImageUV();

        // TODO enable with debug flag
//        cv::namedWindow("props mask", CV_WINDOW_AUTOSIZE);
//        cv::imshow("props mask",maskImage);
//        cv::namedWindow("props image uv", CV_WINDOW_AUTOSIZE);
//        cv::imshow("props image uv",*maskImage2);
//        cv::namedWindow("props masked image uv", CV_WINDOW_AUTOSIZE);
//        cv::imshow("props masked image uv",*maskImage3);
//        cv::waitKey(0);

        cv_bridge::CvImagePtr gray_img_ptr(new cv_bridge::CvImage(std_msgs::Header(), "mono8", maskImage));
        sensor_msgs::Image image_msg_mask;
        image_msg_mask = *(gray_img_ptr->toImageMsg());
        m_DebugImagePublisherGray.publish(image_msg_mask);

        cv_bridge::CvImagePtr color_img_ptr(new cv_bridge::CvImage(std_msgs::Header(), "bgr8", *maskImage3));
        sensor_msgs::Image image_msg_masked_image;
        image_msg_masked_image = *(color_img_ptr->toImageMsg());
        m_DebugImagePublisherColor.publish(image_msg_masked_image);

        return imageProperties;
    }
}

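// Runs the segmentation on the current foreground/background pair without
// cropping and publishes a preview image with the traced outline drawn on top.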
void THIS::previewIsolatedImage()
{
    ImagePropertiesCV* imageProperties = makeImageProperties( "", false );

    if ( imageProperties )
    {
        // send color image to OLPrimary-GUI
        or_msgs::OrImage or_image_msg;

        cv::Mat maskedImage = *(imageProperties->getMaskedImageUV());
        cv_bridge::CvImagePtr cv_ptr_color2(new cv_bridge::CvImage(std_msgs::Header(), "bgr8", maskedImage));
        or_image_msg.image_color = *(cv_ptr_color2->toImageMsg());

        // TODO continue here: draw the outline into the image and publish the image
        imageProperties->traceOutline();
        VectorObject2D border ( *(imageProperties->getOutline()), 1, 0, 0, 1.0 );


        // convert VectorObject2D to ROS message
        or_msgs::VectorObject2D vector_msg;
        vector_msg.r = border.r();
        vector_msg.g = border.g();
        vector_msg.b = border.b();
        vector_msg.line_width = border.lineWidth();


        // create image with painted border
        cv::Mat borderImage = imageProperties->getImageUV()->clone();

        std::vector<Point2D> tmp_vertices = border.vertices();
        for(unsigned i = 0; i < tmp_vertices.size(); i++)
        {
            or_msgs::Point2D point;
            point.x = tmp_vertices.at(i).x();
            point.y = tmp_vertices.at(i).y();
            vector_msg.vertices.push_back(point);
            cv::circle(borderImage, cv::Point(tmp_vertices.at(i).x(), tmp_vertices.at(i).y()), 1, cv::Scalar(0,0,255));
        }
        or_image_msg.vector_objects.push_back(vector_msg);
        m_OLPrimaryImagePublisher.publish(or_image_msg);


        // TODO get rid of double publishing
        cv_bridge::CvImagePtr cv_ptr_color(new cv_bridge::CvImage(std_msgs::Header(), "bgr8", borderImage));
        sensor_msgs::Image image_msg_color;
        image_msg_color = *(cv_ptr_color->toImageMsg());
        m_OLPrimaryImagePublisherColor.publish(image_msg_color);
    }

    delete imageProperties;
}

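// Serializes the collected object properties with a Boost text archive to
// <or_nodes package path><data path><object name>.objprop and starts a new,
// empty object.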
void THIS::saveObject ( std::string objectName )
{
    m_ObjectProperties->setName( objectName );
    m_ObjectProperties->setType( m_ObjectType );

    std::string path = ros::package::getPath("or_nodes");
    std::string filename = path + m_PathForSaving + objectName + ".objprop";

    // write objectProperties file
    std::ofstream out ( filename.c_str() );
    boost::archive::text_oarchive oa(out);
    oa << m_ObjectProperties;

    delete m_ObjectProperties;
    m_ObjectProperties = new ObjectProperties();

    ROS_INFO_STREAM ( "Object saved to " << filename );
}


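// Deserializes object properties from the given .objprop file and restores the
// object type of the loaded object.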
void THIS::loadObject ( std::string filename )
{
    delete m_ObjectProperties;

    //std::string filename = m_PathForSaving + objectName + ".objprop";

    ROS_INFO_STREAM( "Loading " + filename );
    std::ifstream ifs( filename.c_str() );
    boost::archive::text_iarchive ia(ifs);
    m_ObjectProperties = new ObjectProperties();
    ia >> m_ObjectProperties;
    ifs.close();

    m_ObjectType = m_ObjectProperties->getType();
    ROS_INFO_STREAM( m_ObjectType );
}

#undef THIS

