handle_detector_vision.cpp
/*********************************************************************
 * Software License Agreement (BSD License)
 *
 *  Copyright (c) 2008, Willow Garage, Inc.
 *  All rights reserved.
 *
 *  Redistribution and use in source and binary forms, with or without
 *  modification, are permitted provided that the following conditions
 *  are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of the Willow Garage nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 *  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 *  FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 *  COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 *  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 *  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 *  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 *  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 *  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 *  ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 *  POSSIBILITY OF SUCH DAMAGE.
 *********************************************************************/

// Author: Marius Muja

#include <vector>
#include <fstream>
#include <sstream>
#include <string>
#include <cmath>
#include <time.h>
#include <iostream>
#include <iomanip>

#include "cv_bridge/CvBridge.h"

#include "opencv/cxcore.h"
#include "opencv/cv.h"
#include "opencv/highgui.h"

#include "ros/ros.h"
#include "ros/callback_queue.h"
#include "stereo_msgs/DisparityImage.h"
#include "sensor_msgs/CameraInfo.h"
#include "sensor_msgs/Image.h"
#include "sensor_msgs/PointCloud.h"
#include "geometry_msgs/Point32.h"
#include "geometry_msgs/PointStamped.h"
#include "door_msgs/Door.h"
#include "visualization_msgs/Marker.h"
#include "door_handle_detector/DoorsDetector.h"
#include "std_srvs/Empty.h"

// transform library
#include <tf/transform_listener.h>

#include <topic_synchronizer2/topic_synchronizer.h>

#include <boost/thread.hpp>

using namespace std;

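// Thin typed accessor around an IplImage. Note that at() indexes with
// img_->width, i.e. it assumes rows are densely packed
// (widthStep == width * nChannels * sizeof(T)); it is used below to read
// rectangle sums out of an integral-image patch.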
template <typename T>
class IndexedIplImage
{
  public:
    IplImage* img_;
    T* p;

    IndexedIplImage(IplImage* img) : img_(img)
    {
      p = (T*)img_->imageData;
    }

    operator IplImage*()
    {
      return img_;
    }

    T at(int x, int y, int chan = 0)
    {
      return *(p + y * img_->width + x * img_->nChannels + chan);
    }

    T integral_sum(const CvRect& r)
    {
      return at(r.x + r.width, r.y + r.height) - at(r.x + r.width, r.y) - at(r.x, r.y + r.height) + at(r.x, r.y);
    }
};

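// Detects door handles in stereo imagery: a Haar cascade proposes candidate
// rectangles in the left image, and each candidate is validated against the
// disparity image and the stereo point cloud (enough valid disparity,
// planarity, physical size, height above the floor).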
class HandleDetector
{
  public:

    ros::NodeHandle nh_tilde_, nh_;

    sensor_msgs::ImageConstPtr limage_;
    sensor_msgs::ImageConstPtr rimage_;
    stereo_msgs::DisparityImageConstPtr dispimg_;
    sensor_msgs::CameraInfoConstPtr rcinfo_;

    sensor_msgs::CvBridge lbridge_;
    sensor_msgs::CvBridge rbridge_;
    sensor_msgs::CvBridge dbridge_;

    //    sensor_msgs::PointCloud cloud_fetch;
    sensor_msgs::PointCloudConstPtr cloud_;

    IplImage* left_;
    IplImage* right_;
    IplImage* disp_;
    IplImage* disp_clone_;

    ros::Subscriber left_image_sub_;
    //    ros::Subscriber left_caminfo_sub_;
    //    ros::Subscriber right_image_sub_;
    //    ros::Subscriber right_caminfo_sub_;
    ros::Subscriber cloud_sub_;
    ros::Subscriber dispimg_sub_;
    //    ros::Subscriber stereoinfo_sub_;

    ros::ServiceServer detect_service_;
    ros::ServiceServer preempt_service_;

    ros::Publisher marker_pub_;

    TopicSynchronizer sync_;

    tf::TransformListener tf_;

    // minimum height to look at (in base_footprint frame)
    double min_height_;
    // maximum height to look at (in base_footprint frame)
    double max_height_;
    // no. of frames to detect handle in
    int frames_no_;
    // display stereo images?
    bool display_;

    bool preempted_;
    bool got_images_;

    double timeout_;
    ros::Time start_image_wait_;

    ros::CallbackQueue service_queue_;
    boost::thread service_thread_;
    boost::mutex data_lock_;
    boost::condition_variable data_cv_;

    CvHaarClassifierCascade* cascade;
    CvMemStorage* storage;

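    // Reads the node parameters, loads the cascade classifier, and advertises
    // the detection and preempt services; the detection service is put on a
    // private callback queue served by service_thread_.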
    HandleDetector()
      : nh_tilde_("~"), left_(NULL), right_(NULL), disp_(NULL), disp_clone_(NULL), sync_(&HandleDetector::syncCallback, this)
    {
      // define node parameters
      nh_tilde_.param("min_height", min_height_, 0.8);
      nh_tilde_.param("max_height", max_height_, 1.2);
      nh_tilde_.param("frames_no", frames_no_, 10);
      nh_tilde_.param("timeout", timeout_, 3.0);   // timeout (in seconds) until an image must be received, otherwise abort
      nh_tilde_.param("display", display_, false);

      stringstream ss;
      ss << getenv("ROS_ROOT") << "/../ros-pkg/mapping/door_handle_detector/data/";
      string path = ss.str();
      string cascade_classifier;
      nh_tilde_.param<string>("cascade_classifier", cascade_classifier, path + "handles_data.xml");

      if (display_)
      {
        cvNamedWindow("left", CV_WINDOW_AUTOSIZE);
        //cvNamedWindow("right", CV_WINDOW_AUTOSIZE);
        cvNamedWindow("disparity", CV_WINDOW_AUTOSIZE);
        cvNamedWindow("disparity_original", CV_WINDOW_AUTOSIZE);
      }

      // load a cascade classifier
      loadClassifier(cascade_classifier);
      // invalid location until we get a detection

      marker_pub_ = nh_tilde_.advertise<visualization_msgs::Marker>("visualization_marker", 1);

      ros::AdvertiseServiceOptions service_opts = ros::AdvertiseServiceOptions::create<door_handle_detector::DoorsDetector>("door_handle_vision_detector",
        boost::bind(&HandleDetector::detectHandleSrv, this, _1, _2), ros::VoidPtr(), &service_queue_);

      // detect_service_ = nh_.advertiseService("door_handle_vision_detector", &HandleDetector::detectHandleSrv, this);
      detect_service_ = nh_.advertiseService(service_opts);
      preempt_service_ = nh_.advertiseService("door_handle_vision_preempt", &HandleDetector::preempt, this);
      got_images_ = false;

      service_thread_ = boost::thread(boost::bind(&HandleDetector::serviceThread, this));
    }

    ~HandleDetector()
    {
      // Clear the queue and terminate the thread
      service_queue_.clear();
      service_queue_.disable();
      service_thread_.join();
    }

  private:
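    // Load the Haar cascade classifier used to propose handle candidates.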
    void
      loadClassifier(string cascade_classifier)
    {
      // subscribe("/door_detector/door_msg", door, &HandleDetector::door_detected, 1);
      ROS_INFO("Loading cascade classifier: %s", cascade_classifier.c_str());
      cascade = (CvHaarClassifierCascade*)(cvLoad(cascade_classifier.c_str(), 0, 0, 0));
      if (!cascade)
        ROS_ERROR("Cannot load cascade classifier");
      storage = cvCreateMemStorage(0);
    }

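    // Subscribe to the stereo topics (left rectified image, point cloud,
    // disparity image) needed for one detection run.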
    void
      subscribeStereoData()
    {
      left_image_sub_ = nh_.subscribe(nh_.resolveName("stereo") + "/left/image_rect", 1, sync_.synchronize(&HandleDetector::leftImageCallback, this));
      // right_caminfo_sub_ = nh_.subscribe(nh_.resolveName("stereo")+"/right/cam_info", 1, sync_.synchronize(&HandleDetector::rightCameraInfoCallback, this));
      cloud_sub_ = nh_.subscribe(nh_.resolveName("stereo") + "/points", 1, sync_.synchronize(&HandleDetector::cloudCallback, this));
      dispimg_sub_ = nh_.subscribe(nh_.resolveName("stereo") + "/disparity", 1, sync_.synchronize(&HandleDetector::dispimgCallback, this));
      // stereoinfo_sub_ = nh_.subscribe(nh_.resolveName("stereo")+"/stereo_info", 1, sync_.synchronize(&HandleDetector::stereoinfoCallback, this));
    }

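    // Shut down the stereo subscriptions once a detection run is finished.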
    void
      unsubscribeStereoData()
    {
      left_image_sub_.shutdown();
      // right_caminfo_sub_.shutdown();
      cloud_sub_.shutdown();
      dispimg_sub_.shutdown();
      // stereoinfo_sub_.shutdown();
      sync_.reset();
    }

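    // Invoked by the TopicSynchronizer once a matching set of stereo messages
    // has arrived: converts the disparity image to an 8-bit, single-channel
    // IplImage (scaled by 4) and wakes up the detection loop.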
    void
      syncCallback()
    {
      if (disp_ != NULL)
        cvReleaseImage(&disp_);
      if (dbridge_.fromImage(dispimg_->image))
      {
        disp_ = cvCreateImage(cvGetSize(dbridge_.toIpl()), IPL_DEPTH_8U, 1);
        //ROS_ERROR(" debug123 %d channels %d %d ", dispimg_->dpp, dbridge_.toIpl()->nChannels, disp_->nChannels);
        if (dbridge_.toIpl()->nChannels == 3)  // convert to mono first
        {
          IplImage* tmp_ = cvCreateImage(cvGetSize(dbridge_.toIpl()), IPL_DEPTH_8U, 1);
          cvCvtColor(dbridge_.toIpl(), tmp_, CV_BGR2GRAY);
          cvCvtScale(tmp_, disp_, 4.0);
          cvReleaseImage(&tmp_);
        }
        else
          cvCvtScale(dbridge_.toIpl(), disp_, 4.0);
      }

      got_images_ = true;
      data_cv_.notify_all();
    }

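    // Stores the latest left rectified image and converts it to an IplImage.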
    void
      leftImageCallback(const sensor_msgs::Image::ConstPtr& image)
    {
      //ROS_INFO("got left image callback: %lld", image->header.stamp.toNSec());
      boost::unique_lock<boost::mutex> lock(data_lock_);

      limage_ = image;
      if (lbridge_.fromImage(*limage_, "bgr8"))
        left_ = lbridge_.toIpl();
    }

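    // Stores the latest disparity image message.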
    void
      dispimgCallback(const stereo_msgs::DisparityImage::ConstPtr& dinfo)
    {
      boost::unique_lock<boost::mutex> lock(data_lock_);
      //ROS_INFO("got dispimg callback: %lld", dinfo->header.stamp.toNSec());
      dispimg_ = dinfo;
    }

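    // Stores the latest stereo point cloud.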
    void
      cloudCallback(const sensor_msgs::PointCloud::ConstPtr& point_cloud)
    {
      boost::unique_lock<boost::mutex> lock(data_lock_);
      //ROS_INFO("got cloud callback: %lld", point_cloud->header.stamp.toNSec());
      cloud_ = point_cloud;
    }

    // Compute the standard deviation of the disparity values inside a region
    // of interest (a low deviation means the region is roughly fronto-parallel).
    // Id            -- 8-bit, 1-channel disparity image
    // R             -- rectangular region of interest
    // meanDisparity -- output: mean disparity inside R (over accepted pixels)
    // minDisparity  -- disregard disparities smaller than this
    double
      disparitySTD(IplImage* Id, const CvRect& R, double& meanDisparity, double minDisparity = 0.5)
    {
      int ws = Id->widthStep;
      unsigned char* p = (unsigned char*)(Id->imageData);
      int rx = R.x;
      int ry = R.y;
      int rw = R.width;
      int rh = R.height;
      int nchan = Id->nChannels;
      p += ws * ry + rx * nchan;  // put at start of box
      double mean = 0.0, var = 0.0;
      double val;
      int cnt = 0;
      // For vertical objects, disparities should decrease from top to bottom; measure that
      for (int Y = 0; Y < rh; ++Y)
      {
        for (int X = 0; X < rw; X++, p += nchan)
        {
          val = (double)*p;
          if (val < minDisparity)
            continue;

          mean += val;
          var += val * val;
          cnt++;
        }
        p += ws - (rw * nchan);
      }

      if (cnt == 0)
        return (10000000.0);
      // variance = E[x^2] - (E[x])^2
      mean = mean / (double)cnt;
      var = (var / (double)cnt) - mean * mean;
      meanDisparity = mean;
      return (sqrt(var));
    }

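    // Simple per-axis statistics accumulated over a point cloud.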
    struct Stats
    {
      Stats() : mean(0), stdev(0), min(0), max(0) {};

      float mean;
      float stdev;
      float min;
      float max;
    };

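    // Compute the mean, min, max (and a variance-like spread stored in
    // 'stdev') of the x, y and z coordinates of a point cloud.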
    void
      pointCloudStatistics(const sensor_msgs::PointCloud& pc, Stats& x_stats, Stats& y_stats, Stats& z_stats)
    {
      uint32_t size = pc.get_points_size();
      if (size == 0)
        return;
      x_stats.mean = 0;
      x_stats.stdev = 0;
      y_stats.mean = 0;
      y_stats.stdev = 0;
      z_stats.mean = 0;
      z_stats.stdev = 0;

      x_stats.min = pc.points[0].x;
      x_stats.max = pc.points[0].x;
      y_stats.min = pc.points[0].y;
      y_stats.max = pc.points[0].y;
      z_stats.min = pc.points[0].z;
      z_stats.max = pc.points[0].z;

      for (uint32_t i = 0; i < size; ++i)
      {
        x_stats.mean += pc.points[i].x;
        y_stats.mean += pc.points[i].y;
        z_stats.mean += pc.points[i].z;
        x_stats.stdev += (pc.points[i].x * pc.points[i].x);
        y_stats.stdev += (pc.points[i].y * pc.points[i].y);
        z_stats.stdev += (pc.points[i].z * pc.points[i].z);

        if (x_stats.min > pc.points[i].x) x_stats.min = pc.points[i].x;
        if (x_stats.max < pc.points[i].x) x_stats.max = pc.points[i].x;
        if (y_stats.min > pc.points[i].y) y_stats.min = pc.points[i].y;
        if (y_stats.max < pc.points[i].y) y_stats.max = pc.points[i].y;
        if (z_stats.min > pc.points[i].z) z_stats.min = pc.points[i].z;
        if (z_stats.max < pc.points[i].z) z_stats.max = pc.points[i].z;
      }

      x_stats.mean /= size;
      y_stats.mean /= size;
      z_stats.mean /= size;

      x_stats.stdev = x_stats.stdev / size - x_stats.mean * x_stats.mean;
      y_stats.stdev = y_stats.stdev / size - y_stats.mean * y_stats.mean;
      z_stats.stdev = z_stats.stdev / size - z_stats.mean * z_stats.mean;
    }

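    // Keep only the points whose image coordinates (the "u"/"v" channels)
    // fall inside the given image rectangle; returns an empty cloud if the
    // u/v channels are missing.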
    sensor_msgs::PointCloud
      filterPointCloud(const sensor_msgs::PointCloud& pc, const CvRect& rect)
    {
      sensor_msgs::PointCloud result;
      result.header.frame_id = pc.header.frame_id;
      result.header.stamp = pc.header.stamp;

      int xchan = -1;
      int ychan = -1;

      for (size_t i = 0; i < pc.channels.size(); ++i)
      {
        if (pc.channels[i].name == "u")
          xchan = i;
        if (pc.channels[i].name == "v")
          ychan = i;
      }

      if (xchan == -1 || ychan == -1)
        return (result);

      int chan_size = pc.get_channels_size();
      result.channels.resize(chan_size);
      for (int j = 0; j < chan_size; ++j)
        result.channels[j].name = pc.channels[j].name;

      for (size_t i = 0; i < pc.points.size(); ++i)
      {
        int x = (int)pc.channels[xchan].values[i];
        int y = (int)pc.channels[ychan].values[i];
        if (x >= rect.x && x < rect.x + rect.width && y >= rect.y && y < rect.y + rect.height)
        {
          result.points.push_back(pc.points[i]);
          for (int j = 0; j < chan_size; ++j)
            result.channels[j].values.push_back(pc.channels[j].values[i]);
        }
      }
      return (result);
    }

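    // Publish a sphere marker at the detected handle position for visualization.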
    void
      showHandleMarker(const geometry_msgs::PointStamped& p)
    {
      visualization_msgs::Marker marker;
      marker.header.frame_id = p.header.frame_id;
      marker.header.stamp = ros::Time();
      marker.ns = "handle_detector_vision";
      marker.id = 0;
      marker.type = visualization_msgs::Marker::SPHERE;
      marker.action = visualization_msgs::Marker::ADD;
      marker.pose.position.x = p.point.x;
      marker.pose.position.y = p.point.y;
      marker.pose.position.z = p.point.z;
      marker.pose.orientation.w = 1.0;
      marker.scale.x = marker.scale.y = marker.scale.z = 0.1;
      marker.color.g = 1.0;
      marker.color.a = 1.0;
      marker_pub_.publish(marker);
    }

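    // Centroid of the non-zero disparity pixels inside r, computed from the
    // zeroth and first image moments.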
    CvPoint
      getDisparityCenter(CvRect& r)
    {
      CvMoments moments;
      double M00, M01, M10;

      cvSetImageROI(disp_, r);
      cvSetImageCOI(disp_, 1);
      cvMoments(disp_, &moments, 1);
      M00 = cvGetSpatialMoment(&moments, 0, 0);
      M10 = cvGetSpatialMoment(&moments, 1, 0);
      M01 = cvGetSpatialMoment(&moments, 0, 1);
      cvResetImageROI(disp_);
      cvSetImageCOI(disp_, 0);

      CvPoint center;
      center.x = r.x + (int)(M10 / M00);
      center.y = r.y + (int)(M01 / M00);

      return (center);
    }

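    // Shrink the candidate rectangle to the tight bounding box of its
    // non-zero disparity pixels, using column/row sums from an integral image.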
    void
      tryShrinkROI(CvRect& r)
    {
      cvSetImageROI(disp_, r);
      IplImage* integral_patch = cvCreateImage(cvSize(r.width + 1, r.height + 1), IPL_DEPTH_32S, 1);
      cvIntegral(disp_, integral_patch);
      IndexedIplImage<int> ipatch(integral_patch);

      CvRect r2 = r;
      CvRect p;
      // scan from the left for the first non-empty column
      p.x = 0;
      p.y = 0;
      p.height = r.height;
      p.width = 1;
      while (ipatch.integral_sum(p) == 0 && p.x < r.width)
        p.x++;
      r2.x = r.x + p.x;

      // scan from the right for the last non-empty column
      p.x = r.width - 1;
      while (ipatch.integral_sum(p) == 0 && p.x > 0)
        p.x--;
      r2.width = r.x - r2.x + p.x;

      // scan from the top for the first non-empty row
      p.x = 0;
      p.y = 0;
      p.height = 1;
      p.width = r.width;
      while (ipatch.integral_sum(p) == 0 && p.y < r.height)
        p.y++;
      r2.y = r.y + p.y;

      // scan from the bottom for the last non-empty row
      p.y = r.height - 1;
      while (ipatch.integral_sum(p) == 0 && p.y > 0)
        p.y--;
      r2.height = r.y - r2.y + p.y;

      r = r2;
      cvResetImageROI(disp_);
      cvReleaseImage(&integral_patch);
    }

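    // Decide whether a cascade candidate rectangle can really contain a
    // handle: enough valid disparity, limited deviation from a fitted plane,
    // plausible physical size, and a height within [min_height_, max_height_]
    // in the base_footprint frame.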
    bool
      handlePossibleHere(CvRect& r)
    {
      tryShrinkROI(r);

      if (r.width < 10 || r.height < 10)
      {
        ROS_INFO("[handle_detector_vision] handlePossibleHere r.width %d r.height %d", r.width, r.height);
        return (false);
      }

      cvSetImageROI(disp_, r);
      cvSetImageCOI(disp_, 1);
      int cnt;
      const float nz_fraction = 0.1;
      cnt = cvCountNonZero(disp_);
      if (cnt < nz_fraction * r.width * r.height)
      {
        ROS_INFO("[handle_detector_vision] cnt %d nz_fraction %f w %d h %d", cnt, nz_fraction, r.width, r.height);
        cvResetImageROI(disp_);
        cvSetImageCOI(disp_, 0);
        return (false);
      }
      cvResetImageROI(disp_);
      cvSetImageCOI(disp_, 0);

      // compute least-squares handle plane
      sensor_msgs::PointCloud pc = filterPointCloud(*cloud_, r);
      CvScalar plane = estimatePlaneLS(pc);

      cnt = 0;
      double avg = 0;
      double max_dist = 0;
      for (size_t i = 0; i < pc.points.size(); ++i)
      {
        geometry_msgs::Point32 p = pc.points[i];
        double dist = fabs(plane.val[0] * p.x + plane.val[1] * p.y + plane.val[2] * p.z + plane.val[3]);
        max_dist = max(max_dist, dist);
        avg += dist;
        cnt++;
      }
      avg /= cnt;
      if (max_dist > 0.1 || avg < 0.001)
      {
        ROS_INFO("[handle_detector_vision] max_dist %f avg %f", max_dist, avg);
        ROS_DEBUG("Handle candidate rejected on plane-fit residuals: max_dist %f, avg %f", max_dist, avg);
        return false;
      }

      Stats xstats, ystats, zstats;
      pointCloudStatistics(pc, xstats, ystats, zstats);

      double dx, dy;
      dx = xstats.max - xstats.min;
      dy = ystats.max - ystats.min;

      if (dx > 0.25 || dy > 0.15)
      {
        ROS_INFO("[handle_detector_vision] dx %f dy %f", dx, dy);
        ROS_DEBUG("Too big, discarding");
        return (false);
      }

      geometry_msgs::PointStamped pin, pout;
      pin.header.frame_id = cloud_->header.frame_id;
      pin.header.stamp = cloud_->header.stamp;
      pin.point.x = xstats.mean;
      pin.point.y = ystats.mean;
      pin.point.z = zstats.mean;
      if (!tf_.waitForTransform("base_footprint", pin.header.frame_id, pin.header.stamp, ros::Duration(5.0)))
      {
        ROS_INFO("Cannot transform from %s to base_footprint", pin.header.frame_id.c_str());
        return (false);
      }
      tf_.transformPoint("base_footprint", pin, pout);
      if (pout.point.z > max_height_ || pout.point.z < min_height_)
      {
        ROS_INFO("Height not within admissible range: %f", pout.point.z);
        return (false);
      }

      ROS_DEBUG("Handle at: (%d,%d,%d,%d)", r.x, r.y, r.width, r.height);

      return (true);
    }

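    // Run the Haar cascade on the grayscale, histogram-equalized left image
    // and collect the candidate rectangles that pass handlePossibleHere().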
    void
      findHandleCascade(vector<CvRect>& handle_rect)
    {
      IplImage* gray = cvCreateImage(cvSize(left_->width, left_->height), 8, 1);
      cvCvtColor(left_, gray, CV_BGR2GRAY);
      cvEqualizeHist(gray, gray);
      cvClearMemStorage(storage);

      if (cascade)
      {
        CvSeq* handles = cvHaarDetectObjects(gray, cascade, storage, 1.1, 2, 0,
                                             //|CV_HAAR_FIND_BIGGEST_OBJECT
                                             //|CV_HAAR_DO_ROUGH_SEARCH
                                             //|CV_HAAR_DO_CANNY_PRUNING
                                             //|CV_HAAR_SCALE_IMAGE
                                             cvSize(10, 10));

        for (int i = 0; i < (handles ? handles->total : 0); i++)
        {
          CvRect* r = (CvRect*)cvGetSeqElem(handles, i);

          if (handlePossibleHere(*r))
          {
            handle_rect.push_back(*r);
            if (display_)
            {
              cvRectangle(left_, cvPoint(r->x, r->y), cvPoint(r->x + r->width, r->y + r->height), CV_RGB(255, 255, 0));
              cvRectangle(disp_, cvPoint(r->x, r->y), cvPoint(r->x + r->width, r->y + r->height), CV_RGB(255, 255, 255));
            }
          }
          else
          {
            if (display_)
              cvRectangle(left_, cvPoint(r->x, r->y), cvPoint(r->x + r->width, r->y + r->height), CV_RGB(255, 0, 0));
          }
        }
      }
      else
        ROS_ERROR("Cannot look for handle, no detector loaded");
      cvReleaseImage(&gray);
    }

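    // Two candidate rectangles belong to the same cluster if their centers
    // are within 3 pixels of each other in both image coordinates.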
    inline bool
      belongsToCluster(pair<float, float> center, pair<float, float> p)
    {
      return (fabs(center.first - p.first) < 3 && fabs(center.second - p.second) < 3);
    }

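    // Center of a rectangle in image coordinates.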
    inline pair<float, float>
      rectCenter(const CvRect& r)
    {
      return (make_pair(r.x + (float)r.width / 2, r.y + (float)r.height / 2));
    }

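    // Cluster the candidate rectangles collected over several frames, pick the
    // largest cluster, and convert its average bounding box into a 3D handle
    // position in the frame requested by the caller (handle.header.frame_id).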
    bool
      decideHandlePosition(vector<CvRect>& handle_rect, geometry_msgs::PointStamped& handle)
    {
      if (handle_rect.size() == 0)
      {
        ROS_WARN("Handle rect size is 0!");
        return (false);
      }

      vector<int> cluster_size;
      vector<pair<float, float> > centers;
      vector<pair<float, float> > sizes;

      for (size_t i = 0; i < handle_rect.size(); ++i)
      {
        CvRect r = handle_rect[i];
        pair<float, float> crt = rectCenter(r);
        bool found_cluster = false;
        for (size_t j = 0; j < centers.size(); ++j)
        {
          if (belongsToCluster(centers[j], crt))
          {
            // running average of the cluster center and size
            int n = cluster_size[j];
            centers[j].first = (centers[j].first * n + crt.first) / (n + 1);
            centers[j].second = (centers[j].second * n + crt.second) / (n + 1);
            sizes[j].first = (sizes[j].first * n + r.width) / (n + 1);
            sizes[j].second = (sizes[j].second * n + r.height) / (n + 1);
            cluster_size[j]++;
            found_cluster = true;
            break;
          }
        }
        if (!found_cluster)
        {
          centers.push_back(crt);
          sizes.push_back(make_pair(r.width, r.height));
          cluster_size.push_back(1);
        }
      }

      // pick the cluster with the most detections
      int max_ind = 0;
      int max_size = cluster_size[0];

      for (size_t i = 0; i < cluster_size.size(); ++i)
      {
        if (cluster_size[i] > max_size)
        {
          max_size = cluster_size[i];
          max_ind = i;
        }
      }

      CvRect bbox;
      bbox.x = (int)centers[max_ind].first - (int)(sizes[max_ind].first / 2);
      bbox.y = (int)centers[max_ind].second - (int)(sizes[max_ind].second / 2);
      bbox.width = (int)sizes[max_ind].first;
      bbox.height = (int)sizes[max_ind].second;

      sensor_msgs::PointCloud handle_cloud = filterPointCloud(*cloud_, bbox);

      Stats xstats;
      Stats ystats;
      Stats zstats;
      pointCloudStatistics(handle_cloud, xstats, ystats, zstats);

      geometry_msgs::PointStamped handle_stereo;

      handle_stereo.header.frame_id = cloud_->header.frame_id;
      handle_stereo.header.stamp = cloud_->header.stamp;
      handle_stereo.point.x = xstats.mean;
      handle_stereo.point.y = ystats.mean;
      handle_stereo.point.z = zstats.mean;

      if (!tf_.waitForTransform(handle.header.frame_id, handle_stereo.header.frame_id,
                                handle_stereo.header.stamp, ros::Duration(5.0)))
      {
        ROS_ERROR("Cannot transform from %s to %s", handle_stereo.header.frame_id.c_str(),
                  handle.header.frame_id.c_str());
        return (false);
      }
      tf_.transformPoint(handle.header.frame_id, handle_stereo, handle);

      ROS_INFO("Clustered handle at: (%d,%d,%d,%d)", bbox.x, bbox.y, bbox.width, bbox.height);

      if (display_)
      {
        cvRectangle(left_, cvPoint(bbox.x, bbox.y), cvPoint(bbox.x + bbox.width, bbox.y + bbox.height), CV_RGB(0, 255, 0));
        cvShowImage("left", left_);
      }
      showHandleMarker(handle_stereo);

      return (true);
    }

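    // Run the detector over frames_no_ synchronized stereo frames and combine
    // the per-frame candidates into a single handle position. Returns false if
    // preempted (or timed out) before any stereo data arrived, or if no handle
    // was found.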
    bool
      runHandleDetector(geometry_msgs::PointStamped& handle)
    {
      vector<CvRect> handle_rect;

      // acquire the data lock
      boost::unique_lock<boost::mutex> lock(data_lock_);

      for (int i = 0; i < frames_no_; ++i)
      {
        ROS_INFO("DoorHandleDetector: Waiting for stereo data");
        got_images_ = false;
        preempted_ = false;
        // block until images are available to process
        start_image_wait_ = ros::Time::now();
        while (!got_images_ && !preempted_)
          data_cv_.wait(lock);
        if (preempted_)
        {
          ROS_INFO("DoorHandleDetector: detect loop preempted, stereo data not received");
          return (false);
        }

        if (display_)
          // show original disparity
          cvShowImage("disparity_original", disp_);
        ROS_INFO("DoorHandleDetector: Running handle detection");

        // eliminate from the disparity image the locations that cannot contain a handle
        applyPositionPrior();

        // run cascade classifier
        findHandleCascade(handle_rect);

        if (display_)
        {
          // show filtered disparity
          cvShowImage("disparity", disp_);
          // show left image
          cvShowImage("left", left_);
          cvWaitKey(100);
        }
      }

      bool found = decideHandlePosition(handle_rect, handle);
      return (found);
    }

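    // Service callback for door_handle_vision_detector: subscribes to the
    // stereo topics, runs the detector, and fills the response with the door
    // message plus the detected handle position in the door's frame.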
    bool
      detectHandleSrv(door_handle_detector::DoorsDetector::Request& req, door_handle_detector::DoorsDetector::Response& resp)
    {
      ROS_INFO_STREAM("detectHandleSrv thread id=" << boost::this_thread::get_id());
      preempted_ = false;

      ROS_INFO("door_handle_detector_vision: Service called");
      geometry_msgs::PointStamped handle;
      handle.header.frame_id = req.door.header.frame_id;   // want the handle in the same frame as the door
      subscribeStereoData();
      bool found = runHandleDetector(handle);
      unsubscribeStereoData();

      if (!found)
      {
        ROS_WARN("No handles found.");
        return (false);
      }
      geometry_msgs::PointStamped handle_transformed;
      // transform the point into the expected frame
      if (!tf_.waitForTransform(req.door.header.frame_id, handle.header.frame_id, handle.header.stamp, ros::Duration(5.0)))
      {
        ROS_ERROR("Cannot transform from %s to %s", handle.header.frame_id.c_str(), req.door.header.frame_id.c_str());
        return (false);
      }

      tf_.transformPoint(req.door.header.frame_id, handle, handle_transformed);

      resp.doors.resize(1);
      if (found)
      {
        resp.doors[0] = req.door;
        resp.doors[0].header.stamp = handle.header.stamp;  // set time stamp
        resp.doors[0].handle.x = handle.point.x;
        resp.doors[0].handle.y = handle.point.y;
        resp.doors[0].handle.z = handle.point.z;
        resp.doors[0].weight = 1;
      }
      else
      {
        resp.doors[0] = req.door;
        resp.doors[0].weight = -1;
      }
      return (true);
    }

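    // Service callback for door_handle_vision_preempt: asks the detection
    // loop to stop waiting for images.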
    bool
      preempt(std_srvs::Empty::Request& req, std_srvs::Empty::Response& resp)
    {
      ROS_INFO("Preempting.");
      preempted_ = true;
      return (true);
    }

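    // Fit a plane a*x + b*y + c*z + d = 0 to the points in a least-squares
    // sense by solving A*X = 1 with SVD; returns (a, b, c, d) scaled so that
    // (a, b, c) is a unit normal, or (-1,-1,-1,-1) on failure.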
    CvScalar
      estimatePlaneLS(const sensor_msgs::PointCloud& points)
    {
      CvScalar plane;
      int cnt = points.points.size();
      if (cnt == 0)
      {
        plane.val[0] = plane.val[1] = plane.val[2] = plane.val[3] = -1;
        return plane;
      }
      CvMat* A = cvCreateMat(cnt, 3, CV_32FC1);
      for (int i = 0; i < cnt; ++i)
      {
        geometry_msgs::Point32 p = points.points[i];
        cvmSet(A, i, 0, p.x);
        cvmSet(A, i, 1, p.y);
        cvmSet(A, i, 2, p.z);
      }
      vector<float> ones(cnt, 1);
      CvMat B;
      cvInitMatHeader(&B, cnt, 1, CV_32FC1, &ones[0]);
      CvMat* X = cvCreateMat(3, 1, CV_32FC1);
      int ok = cvSolve(A, &B, X, CV_SVD);
      if (ok)
      {
        float* xp = X->data.fl;
        float d = sqrt(xp[0] * xp[0] + xp[1] * xp[1] + xp[2] * xp[2]);
        plane.val[0] = xp[0] / d;
        plane.val[1] = xp[1] / d;
        plane.val[2] = xp[2] / d;
        plane.val[3] = -1 / d;
      }
      else
        plane.val[0] = plane.val[1] = plane.val[2] = plane.val[3] = -1;
      cvReleaseMat(&A);
      cvReleaseMat(&X);
      return (plane);
    }

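    // Zero out the disparity pixels whose 3D points lie outside the height
    // band [min_height_, max_height_] in the base_footprint frame, so those
    // regions are rejected during candidate validation.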
    void
      applyPositionPrior()
    {
      sensor_msgs::PointCloud base_cloud;
      if (!tf_.waitForTransform("base_footprint", cloud_->header.frame_id, cloud_->header.stamp, ros::Duration(5.0)))
      {
        ROS_ERROR("Cannot transform from %s to base_footprint", cloud_->header.frame_id.c_str());
        return;
      }
      tf_.transformPointCloud("base_footprint", *cloud_, base_cloud);

      int xchan = -1, ychan = -1;
      for (size_t i = 0; i < base_cloud.channels.size(); ++i)
      {
        if (base_cloud.channels[i].name == "u")
          xchan = i;
        if (base_cloud.channels[i].name == "v")
          ychan = i;
      }

      if (xchan == -1 || ychan == -1)
      {
        ROS_WARN("Cannot find image coordinates in the point cloud, no filtering done.");
        return;
      }

      unsigned char* pd = (unsigned char*)(disp_->imageData);
      int ws = disp_->widthStep;
      for (size_t i = 0; i < base_cloud.get_points_size(); ++i)
      {
        int x = (int)(base_cloud.channels[xchan].values[i]);
        int y = (int)(base_cloud.channels[ychan].values[i]);

        if (base_cloud.points[i].z > max_height_ || base_cloud.points[i].z < min_height_)
        {
          // pixel at column x, row y of the 8-bit, 1-channel disparity image
          unsigned char* crt_pd = pd + y * ws + x;
          *crt_pd = 0;
        }
      }
    }

  public:

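    // Main loop: enforces the image-wait timeout, services the display
    // windows when display_ is on, and pumps the global ROS callback queue.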
    bool
      spin()
    {
      ros::Rate r(10);
      while (nh_.ok())
      {
        data_lock_.lock();
        if (!got_images_)
        {
          if ((ros::Time::now() - start_image_wait_) > ros::Duration(timeout_))
          {
            preempted_ = true;
            data_cv_.notify_all();
          }
        }
        data_lock_.unlock();

        if (display_)
        {
          int key = cvWaitKey(10) & 0x00FF;
          if (key == 27)  // ESC
            nh_.shutdown();
        }
        else
          r.sleep();
        ros::spinOnce();
      }

      return (true);
    }

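    // Dedicated thread that services the detection/preempt callback queue, so
    // a long-running detection service call does not block the subscription
    // callbacks handled by the global queue.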
    void
      serviceThread()
    {
      // ROS_INFO_STREAM("Starting thread " << boost::this_thread::get_id());
      ros::NodeHandle n;

      ros::Rate r(10);
      while (n.ok())
      {
        service_queue_.callAvailable();
        r.sleep();
      }
    }
};

int
  main(int argc, char** argv)
{
  for (int i = 0; i < argc; ++i)
    cout << "(" << i << "): " << argv[i] << endl;

  ros::init(argc, argv, "handle_detector_vision");
  HandleDetector detector;
  detector.spin();

  return (0);
}

