learnsifts.cpp
/*
 * Software License Agreement (Modified BSD License)
 *
 *  Copyright (c) 2012, PAL Robotics, S.L.
 *  All rights reserved.
 *
 *  Redistribution and use in source and binary forms, with or without
 *  modification, are permitted provided that the following conditions
 *  are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of PAL Robotics, S.L. nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 *  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 *  FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 *  COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 *  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 *  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 *  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 *  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 *  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 *  ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 *  POSSIBILITY OF SUCH DAMAGE.
 *
 * @file learnsifts.cpp
 * @author Bence Magyar
 * @date April 2012
 * @version 0.2
 * @brief Main file of BLORT learnsifts node for ROS. Original source: Learnsifts.cpp of BLORT.
 */
#include <blort/Tracker/TextureTracker.h>
#include <blort/Tracker/utilities.hpp>
#include <blort/TomGine/tgModel.h>
#include <blort/TomGine/tgModelLoader.h>
#include <blort/TomGine/tgTimer.h>
#include <blort/GLWindow/GLWindow.h>
#include <blort/ThreadObject/RecognizerThread.h>

#include <ros/ros.h>
#include <opencv2/core/core.hpp>
#include <blort/blort/pal_util.h>

#include <blort_msgs/TrackerConfidences.h>
#include <image_transport/image_transport.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>
#include <geometry_msgs/Pose.h>

// Shared state between the ROS callbacks and the main loop.
cv::Mat lastImage;                          // most recent image received on the image topic
bool need_new_image = true;                 // true while we are waiting for a fresh image
TomGine::tgCamera::Parameter tgCamParams;   // camera parameters for TomGine / the tracker
bool need_cam_init;                         // true until the first camera_info message arrives
ros::Subscriber cam_info_sub;
sensor_msgs::CameraInfo camera_info;

void imageCb(const sensor_msgs::ImageConstPtr& msg)
{
  cv_bridge::CvImagePtr cv_ptr;
  try
  {
    cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
  }
  catch (cv_bridge::Exception& e)
  {
    ROS_ERROR("cv_bridge exception: %s", e.what());
    return;
  }

  lastImage = cv_ptr->image;
  need_new_image = false; //FIXME remove
}

void cam_info_callback(const sensor_msgs::CameraInfo &msg)
{
  tgCamParams = TomGine::tgCamera::Parameter(msg);
  ROS_INFO("Camera parameters received.");
  camera_info = msg;
  need_cam_init = false;
  cam_info_sub.shutdown();
}

// Pixel coordinates at which a mouse drag started; used by the mouse-based
// translation/rotation controls in the main loop below.
struct TranslateStart
{
  TranslateStart()
    : x(0), y(0)
  {
  }

  int x;
  int y;
};

int main(int argc, char *argv[] )
{
  if(argc < 2)
  {
    printf("Usage: %s <config_root>\n", argv[0]);
    return 1;
  }
  std::string config_root = argv[1];

  //this line should force opengl to run software rendering == no GPU
  //putenv("LIBGL_ALWAYS_INDIRECT=1");
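  // A minimal alternative sketch (not enabled here, and an assumption rather than
  // part of the original flow): the POSIX setenv() call avoids putenv()'s
  // requirement of a writable string:
  //   setenv("LIBGL_ALWAYS_INDIRECT", "1", 1);  // would need <cstdlib>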

  ros::init(argc, argv, "blort_learnsifts");
  ros::NodeHandle nh("blort_learnsifts");
  image_transport::ImageTransport it(nh);
  image_transport::Subscriber image_sub = it.subscribe("/blort_image", 1, &imageCb);
  cam_info_sub = nh.subscribe("/blort_camera", 1, &cam_info_callback);
  ros::Publisher confidences_pub = nh.advertise<blort_msgs::TrackerConfidences>("confidences", 100);

  printf("\n Learnsifts \n\n");

  printf(" Tracker control\n");
  printf(" -------------------------------------------\n");
  printf(" [q,Esc] Quit program\n");
  printf(" [Return] Save SIFT and ply models.\n");
  printf(" [Space] Save texture face and extract SIFTs.\n");
  printf(" -------------------------------------------\n");
  printf(" [4] Texture edge tracking\n");
  printf(" [5] Texture color tracking\n");
  printf(" [a] High accuracy tracking (less robust)\n");
  printf(" [e] Show edges of image\n");
  printf(" [i] Print tracking information to console\n");
  printf(" [l] Lock/Unlock tracking\n");
  printf(" [m] Switch display mode of model\n");
  printf(" [p] Show particles\n");
  printf(" [r] Reset tracker to initial pose\n");
  printf(" [s] Save model to file (including textures)\n");
  printf(" [t] Capture model texture from image\n");
  printf(" [u] Untexture/remove texture from model\n");
  printf(" \n\n ");

  blortGLWindow::Event event;
  TomGine::tgTimer timer;
  std::vector<vec3> view_list;

  // File names
  //FIXME: make these ROS parameters or eliminate them and use the content as parameters
  // (a sketch of the ROS-parameter variant follows right below)
  std::string pose_cal = blort_ros::addRoot("config/pose.cal", config_root);
  std::string tracking_ini(blort_ros::addRoot("config/tracking.ini", config_root));
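  // A minimal sketch of what the FIXME above could look like (an assumption,
  // not wired in here): read the two config paths from private ROS parameters,
  // falling back to the current hard-coded values, and resolve them as before.
  //   std::string pose_cal_param, tracking_ini_param;
  //   nh.param<std::string>("pose_cal", pose_cal_param, "config/pose.cal");
  //   nh.param<std::string>("tracking_ini", tracking_ini_param, "config/tracking.ini");
  //   std::string pose_cal = blort_ros::addRoot(pose_cal_param, config_root);
  //   std::string tracking_ini = blort_ros::addRoot(tracking_ini_param, config_root);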
  std::vector<std::string> ply_models, sift_files, model_names;
  std::string ply_model, sift_file, model_name;

  GetPlySiftFilenames(tracking_ini.c_str(), ply_models, sift_files, model_names);
  ply_model = ply_models[0]; sift_file = sift_files[0]; model_name = model_names[0];
  printf("Object name: %s\n", model_name.c_str());
  sift_file = blort_ros::addRoot(sift_file, config_root);

  // Get parameters from file
  TomGine::tgPose camPose, camPoseT;
  Tracking::Tracker::Parameter trackParams;
  GetTrackingParameter(trackParams, tracking_ini.c_str(), config_root);
  getCamPose(pose_cal.c_str(), camPose);
  camPoseT = camPose.Transpose();

  printf("=> Getting camera intrinsics ... ");
  // wait for the first camera_info message (also bail out on Ctrl+C / shutdown)
  need_cam_init = true;
  while(need_cam_init && ros::ok())
  {
    ros::spinOnce();
  }
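  // An equivalent sketch using the ROS helper instead of the spin loop above
  // (not used here; shown only as an alternative, would need <ros/topic.h>):
  //   sensor_msgs::CameraInfoConstPtr ci =
  //       ros::topic::waitForMessage<sensor_msgs::CameraInfo>("/blort_camera", nh);
  //   // ci is a null pointer if the node is shut down while waiting.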
  setCameraPose(tgCamParams, pose_cal.c_str());
  trackParams.camPar = tgCamParams;

  printf("OK\n");

  // Initialise image
  IplImage *image = cvCreateImage( cvSize(camera_info.width, camera_info.height), 8, 3 ); //FIXME dirty

  printf("=> Initialising tracker ... ");

  // Create OpenGL Window and Tracker
  CRecognizerThread* pThreadRec =
      new CRecognizerThread(blortRecognizer::CameraParameter(camera_info));
  blortGLWindow::GLWindow window(trackParams.camPar.width, trackParams.camPar.height, "Training");
  Tracking::TextureTracker tracker;
  tracker.init(trackParams);
  float rec_time = 0.0f;
  printf("OK\n");

  // Model for Tracker
  TomGine::tgPose trPose;
  trPose.t = vec3(0.0, 0.1, 0.0);
  trPose.Rotate(0.0f, 0.0f, 0.5f);
  std::string modelFullPath = blort_ros::addRoot(ply_model, config_root);
  printf("=> Trying to get the object model from file: %s\n", modelFullPath.c_str());
  int modelID = tracker.addModelFromFile(modelFullPath.c_str(), trPose, model_name.c_str(), true);
  printf(" OK\n");
  tracker.setLockFlag(true);

  // Model for Recognizer / TomGine (ray-model intersection)
  TomGine::tgModel model;
  TomGine::tgPose tg_pose;
  TomGine::tgModelLoader modelloader;
  modelloader.LoadPly(model, blort_ros::addRoot(ply_model, config_root).c_str());
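  // A small robustness sketch (an assumption about the TomGine API, not part of
  // the original flow): if LoadPly reports failure through its boolean return
  // value, the node could abort early instead of tracking an empty model.
  //   if(!modelloader.LoadPly(model, blort_ros::addRoot(ply_model, config_root).c_str()))
  //   {
  //     ROS_ERROR("Could not load ply model '%s'.", ply_model.c_str());
  //     return 1;
  //   }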

  Tracking::movement_state movement = Tracking::ST_FAST;
  Tracking::quality_state quality = Tracking::ST_OK;
  Tracking::confidence_state confidence = Tracking::ST_BAD;

  // wait for the first camera image (also bail out on Ctrl+C / shutdown)
  need_new_image = true;
  while(need_new_image && ros::ok())
  {
    ros::spinOnce();
  }
  *image = lastImage;

  pThreadRec->Event();

  bool quit = false;
  bool translateXY = false;
  bool translateZ = false;
  bool rotateXYZ = false;
  bool rotateX = false;
  bool rotateY = false;
  bool rotateZ = false;
  TranslateStart start;
  while(!quit)
  {
    tracker.getModelMovementState(modelID, movement);
    tracker.getModelQualityState(modelID, quality);
    tracker.getModelConfidenceState(modelID, confidence);

    if(confidence == Tracking::ST_GOOD && movement == Tracking::ST_STILL && quality != Tracking::ST_LOCKED)
    {
      ROS_WARN("Sorry, no learning with this node.");
      ROS_INFO("New initial pose fixed: \nt: [%f, %f, %f] \nq:[%f, %f, %f, %f]",
               trPose.t.x,
               trPose.t.y,
               trPose.t.z,
               trPose.q.x,
               trPose.q.y,
               trPose.q.z,
               trPose.q.w);
    }
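    // A hypothetical extension (not implemented here): the geometry_msgs/Pose.h
    // include above is currently unused, and the pose logged in this branch could
    // also be published via a publisher advertised next to confidences_pub:
    //   geometry_msgs::Pose pose_msg;              // pose_pub is an assumed publisher
    //   pose_msg.position.x = trPose.t.x;  pose_msg.position.y = trPose.t.y;  pose_msg.position.z = trPose.t.z;
    //   pose_msg.orientation.x = trPose.q.x; pose_msg.orientation.y = trPose.q.y;
    //   pose_msg.orientation.z = trPose.q.z; pose_msg.orientation.w = trPose.q.w;
    //   pose_pub.publish(pose_msg);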

    //recovery, if object lost
    if(quality == Tracking::ST_LOST && rec_time > 0.2f)
    {
      //            TomGine::tgPose recPose;
      //            float conf;
      //
      //            pThreadRec->Recognize(image, recPose, conf);
      //            if(conf > recovery_conf_threshold)
      //            {
      //                ConvertCam2World(recPose, camPose, trPose);
      //                tracker.setModelInitialPose(modelID, trPose);
      //                tracker.resetUnlockLock();
      //            }
      //            //tracker.reset();
      //
      //            ROS_INFO("orig conf: %f", conf);
      //
      //            // if the recovery's confidence is high enough then
      //            // kick the tracker out of this state and let it try
      //
      //
      //            rec_time = 0.0f;
      //            ROS_WARN("Tried to recover for the %d. time.", ++rec3dcounter);
    }else{
      rec_time += timer.Update();
    }

    if(!need_new_image)
    {
      // Get image
      timer.Update();
      *image = lastImage;

      // Track object
      tracker.image_processing((unsigned char*)image->imageData);
      tracker.track();

      tracker.drawImage(0);
      tracker.drawCoordinates();
      tracker.getModelPose(modelID, trPose);
      tracker.drawResult(2.0f);
      tg_pose = trPose;
      need_new_image = true;
    }
    // Keyboard inputs:
    while(window.GetEvent(event)){
      quit = !InputControl(&tracker, event, config_root);
      if(event.type == blortGLWindow::TMGL_Press)
      {
        if(event.input == blortGLWindow::TMGL_Space)
        {
          // When hitting space bar -> Learn new SIFT features
          TomGine::tgPose pT1;
          TomGine::tgPose pT2, pT3, pT4;
          tracker.getModelPoseCamCoords(modelID, pT1);

          // Record the current viewing direction: the camera-to-object translation
          // rotated into the model frame and normalised.
          mat3 R; vec3 a;
          pT1.GetPose(R, a);
          a = R.transpose() * pT1.t;
          a.normalize();
          view_list.push_back(a);

          pT2 = pT1;
          pThreadRec->LearnSifts(image, model, pT2);

          std::vector<blortRecognizer::Siftex> sl;
          pThreadRec->GetLastSifts(sl);
          tracker.textureFromImage(true);
        }
        else if(event.input == blortGLWindow::TMGL_Return)
        {
          // When hitting Return -> Save SIFT model and recognize
          //TomGine::tgPose recPose;
          //float conf;
          pThreadRec->SaveSiftModel(sift_file.c_str());
          //pThreadRec->Recognize(image, recPose, conf);
          //ConvertCam2World(recPose, camPose, trPose);
          //tracker.setModelInitialPose(modelID, trPose);
          //tracker.reset(modelID);
          tracker.saveModels(blort_ros::addRoot("Resources/ply/", config_root).c_str());
        }
      }
      if(event.type == blortGLWindow::TMGL_Press)
      {
        if( event.input == blortGLWindow::TMGL_Button3 && ! (translateZ || rotateXYZ) )
        {
          translateXY = true;
          start.x = event.motion.x; start.y = event.motion.y;
        }
        if( event.input == blortGLWindow::TMGL_Button2 && ! (translateXY || rotateXYZ) )
        {
          translateZ = true;
          start.x = event.motion.x; start.y = event.motion.y;
        }
        if( event.input == blortGLWindow::TMGL_x && ! (translateXY || translateZ || rotateXYZ) )
        {
          std::cout << "Object rotation: (X) axis selected" << std::endl;
          rotateX = true;
          rotateY = false;
          rotateZ = false;
        }
        if( event.input == blortGLWindow::TMGL_y && ! (translateXY || translateZ || rotateXYZ) )
        {
          std::cout << "Object rotation: (Y) axis selected" << std::endl;
          rotateX = false;
          rotateY = true;
          rotateZ = false;
        }
        if( event.input == blortGLWindow::TMGL_z && ! (translateXY || translateZ || rotateXYZ) )
        {
          std::cout << "Object rotation: (Z) axis selected" << std::endl;
          rotateX = false;
          rotateY = false;
          rotateZ = true;
        }
        if( event.input == blortGLWindow::TMGL_Button1 )
        {
          rotateXYZ = true;
          start.x = event.motion.x; start.y = event.motion.y;
        }
      }
      if(event.type == blortGLWindow::TMGL_Release)
      {
        if(event.input == blortGLWindow::TMGL_Button3)
        {
          translateXY = false;
        }
        if(event.input == blortGLWindow::TMGL_Button2)
        {
          translateZ = false;
        }
        if(event.input == blortGLWindow::TMGL_Button1)
        {
          rotateXYZ = false;
        }
      }
      if(event.type == blortGLWindow::TMGL_Motion)
      {
        if(translateXY)
        {
          // Drag with button 3: map the pixel drag through the camera rotation
          // matrix into a camera-frame X/Y shift of the camera position
          // (0.001 units per drag pixel).
          float translateX = (start.x - event.motion.x)*0.001;
          float translateY = (start.y - event.motion.y)*0.001;
          trackParams.camPar.pos.x += trackParams.camPar.rot.mat[0]*translateX + trackParams.camPar.rot.mat[1]*translateY;
          trackParams.camPar.pos.y += trackParams.camPar.rot.mat[3]*translateX + trackParams.camPar.rot.mat[4]*translateY;
          trackParams.camPar.pos.z += trackParams.camPar.rot.mat[6]*translateX + trackParams.camPar.rot.mat[7]*translateY;
          start.x = event.motion.x; start.y = event.motion.y;
          tracker.setCameraParameters(trackParams.camPar);
        }
        if(translateZ)
        {
          // Drag with button 2: vertical drag moves the camera along its
          // viewing axis (0.001 units per drag pixel).
          float translateZ = (start.y - event.motion.y)*0.001;
          trackParams.camPar.pos.x += trackParams.camPar.rot.mat[2]*translateZ;
          trackParams.camPar.pos.y += trackParams.camPar.rot.mat[5]*translateZ;
          trackParams.camPar.pos.z += trackParams.camPar.rot.mat[8]*translateZ;
          start.x = event.motion.x; start.y = event.motion.y;
          tracker.setCameraParameters(trackParams.camPar);
        }
        if(rotateX && rotateXYZ)
        {
          // Drag with button 1 while the X axis is selected ('x' key):
          // rotate the model about X by 0.01 per drag pixel, then re-seed the tracker.
          float thetaX = (-event.motion.x + start.x)*0.01;
          TomGine::tgPose nPose;
          tracker.getModelPose(modelID, nPose);
          nPose.Rotate(thetaX, 0.0f, 0.0f);
          tracker.setModelInitialPose(modelID, nPose);
          tracker.reset(modelID);
          start.x = event.motion.x; start.y = event.motion.y;
        }
        if(rotateY && rotateXYZ)
        {
          float thetaY = (-event.motion.x + start.x)*0.01;
          TomGine::tgPose nPose;
          tracker.getModelPose(modelID, nPose);
          nPose.Rotate(0.0f, thetaY, 0.0f);
          tracker.setModelInitialPose(modelID, nPose);
          tracker.reset(modelID);
          start.x = event.motion.x; start.y = event.motion.y;
        }
        if(rotateZ && rotateXYZ)
        {
          float thetaZ = (-event.motion.x + start.x)*0.01;
          TomGine::tgPose nPose;
          tracker.getModelPose(modelID, nPose);
          nPose.Rotate(0.0f, 0.0f, thetaZ);
          tracker.setModelInitialPose(modelID, nPose);
          tracker.reset(modelID);
          start.x = event.motion.x; start.y = event.motion.y;
        }
      }
      event.type = blortGLWindow::TMGL_None;
    }

    window.Update();

    //publish tracker inner confidences just for fun
    Tracking::ModelEntry* myModelEntry = tracker.getModelEntry(modelID);
    blort_msgs::TrackerConfidences tr_conf;
    tr_conf.edgeConf = myModelEntry->c_edge;
    tr_conf.confThreshold = myModelEntry->c_th;
    tr_conf.lostConf = myModelEntry->c_lost;
    tr_conf.distance = myModelEntry->t;
    confidences_pub.publish(tr_conf);

    window.Activate();
    ros::spinOnce();
  }
  delete pThreadRec;
  cvReleaseImage(&image);
  return 0;
}

