blob_store.cc
/*
 * Copyright (c) 2008, Willow Garage, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the Willow Garage, Inc. nor the names of its
 *       contributors may be used to endorse or promote products derived from
 *       this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 */

#include <semanticmodel/blob_store.h>
#include "blob_properties.h"
#include <boost/foreach.hpp>
#include <cv_bridge/cv_bridge.h>
#include <opencv/cv.h>
#include <pcl16/ros/conversions.h>
#include <sensor_msgs/image_encodings.h>
#include <boost/optional.hpp>
#include <tf/exceptions.h>
#include <pcl16/features/normal_3d.h>
#include <pcl16/filters/extract_indices.h>

namespace semanticmodel
{

namespace mr=mongo_ros;
namespace sm=sensor_msgs;
namespace enc=sensor_msgs::image_encodings;
namespace gm=geometry_msgs;

using std::vector;
using std::string;
using std::map;
using std::max;
using std::min;

typedef boost::mutex::scoped_lock Lock;
typedef vector<float> Color;
typedef map<string, Color> ColorMap;
typedef pcl16::PointXYZRGB Point;
typedef pcl16::PointCloud<Point> PointCloud;

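// Construct the store: remember the relevant frames and database name, set up
// the publisher for the most recently added blob image and the switch_blob_db
// service, then initialize the database collections by switching to the given
// collection namespace.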
BlobStore::BlobStore (const string& canonical_frame,
                      const string& rgb_camera_frame,
                      const string& db_name,
                      const string& collection_namespace)
  : canonical_frame_(canonical_frame),
    rgb_camera_frame_(rgb_camera_frame),
    base_frame_("/base_footprint"),
    db_name_(db_name),
    max_head_angular_speed_(0.3),
    it_(nh_),
    tf_(ros::Duration(600)),
    pub_(it_.advertise("last_added_blob", 10)),
    switch_srv_(nh_.advertiseService("switch_blob_db",
                                     &BlobStore::switchDb, this))
{
//  ROS_INFO_STREAM("storing collections in " << db_name << "/" <<
//                  collection_namespace);
  SwitchDB::Request req;
  SwitchDB::Response resp;
  req.collection_namespace = collection_namespace;
  switchDb(req, resp);
}

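// Service callback: point the image and blob collections at a new collection
// namespace within the current database.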
bool BlobStore::switchDb (SwitchDB::Request& req,
                          SwitchDB::Response& resp)
{
  Lock l(mutex_);
  images_.reset(
      new ImageCollection(db_name_, req.collection_namespace + "_images"));
  blobs_.reset(
      new BlobCollection(db_name_, req.collection_namespace + "_blobs"));
  ROS_DEBUG_STREAM_NAMED ("blob_store", "Successfully switched to " <<
                          req.collection_namespace);
  return true;
}

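// Build a stamped geometry_msgs point from a pcl16 point, a time stamp, and a
// frame id.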
gm::PointStamped makePoint(const Point& p, const ros::Time& t, const string& f)
{
  gm::PointStamped pt;
  pt.point.x = p.x;
  pt.point.y = p.y;
  pt.point.z = p.z;
  pt.header.stamp = t;
  pt.header.frame_id = f;
  return pt;
}

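// Orientation of the RGB camera frame with respect to /map at time t, as a
// btQuaternion.  Throws tf::TransformException if the transform is unavailable.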
btQuaternion BlobStore::headAngleAt (const ros::Time& t)
{
  const ros::Duration timeout(0.5);
  const string base = "/map";

  // No need to wait for the transform: the requested time is in the past, so
  // if it isn't available now it never will be.
  //tf_.waitForTransform(base, camera, t, timeout);
  tf::StampedTransform trans;
  tf_.lookupTransform(base, rgb_camera_frame_, t, trans);
  return trans.getRotation().asBt();
}

// To get a single number summarizing how fast the head is moving with respect
// to the map, take the norm of the vector of rates of change of the Euler
// angles, estimated by central differences over a one-second window.
double BlobStore::headAngularSpeed (const ros::Time& t)
{
  try
  {
    const double DT=0.5;
    ros::Duration d(DT);
    btMatrix3x3 m0(headAngleAt(t-d));
    btMatrix3x3 m1(headAngleAt(t+d));
    double a0, b0, c0, a1, b1, c1;
    m0.getEulerYPR(a0, b0, c0);
    m1.getEulerYPR(a1, b1, c1);
    const double da = (a1-a0)/(2*DT);
    const double db = (b1-b0)/(2*DT);
    const double dc = (c1-c0)/(2*DT);
    return sqrt(da*da+db*db+dc*dc);
  }
  catch (tf::TransformException& e)
  {
    ROS_WARN_STREAM("Received tf exception when computing head speed for time "
                     << t << ", so returning large value (42)."
                     << " Exception was " << e.what());
    return 42;
  }
}

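// Project the blob's cloud points into the given camera image, draw their
// convex hull as a green polygon on a copy of the image, and return the
// annotated image as a ROS message.  Throws tf::TransformException if the
// transform from the canonical frame to the camera frame at the image's
// acquisition time is unavailable.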
sm::Image::Ptr BlobStore::highlightBlobInImage(const Blob& blob,
                                               const sm::Image& img,
                                               const sm::CameraInfo& info)
{
  const ros::Time acquisition_time = img.header.stamp;

  ROS_DEBUG_STREAM_NAMED ("blob_store",
                          "Highlighting in image with acquisition time "
                          << acquisition_time);

  // Convert to OpenCV
  cv_bridge::CvImagePtr image;
  image = cv_bridge::toCvCopy(img, enc::BGR8);

  // Initialize the camera model from the camera info
  cam_model_.fromCameraInfo(info);
  const string target_frame = cam_model_.tfFrame();

  // Figure out corners of bounding box
  ROS_ASSERT(!blob.cloud->points.empty());
  boost::optional<double> min_x, min_y, max_x, max_y;

  // No need to wait for the transform used inside the loop: the acquisition
  // time is in the past, so waiting would not help.
  /*
  const ros::Duration timeout(0.5);
  ROS_DEBUG_STREAM_THROTTLE_NAMED (1.0, "blob_store",
                                   "Waiting for transform to " << target_frame
                                   << " from " << canonical_frame_ << " at "
                                   << acquisition_time);
  tf_.waitForTransform(target_frame, canonical_frame_,
                       acquisition_time, timeout);
  */

  tf::StampedTransform trans;
  tf_.lookupTransform(target_frame, canonical_frame_,
                      acquisition_time, trans);

  vector<cv::Point> points;
  BOOST_FOREACH (const Point& pcl16_pt, blob.cloud->points)
  {
    const tf::Point in(pcl16_pt.x, pcl16_pt.y, pcl16_pt.z);
    const tf::Point out = trans*in;

    // Project into the image
    cv::Point3d pt_cv(out.x(), out.y(), out.z());
    cv::Point2d image_pt = cam_model_.project3dToPixel(pt_cv);
    points.push_back(image_pt);

    if (!min_x || *min_x > image_pt.x)
      min_x = image_pt.x;
    if (!min_y || *min_y > image_pt.y)
      min_y = image_pt.y;
    if (!max_x || *max_x < image_pt.x)
      max_x = image_pt.x;
    if (!max_y || *max_y < image_pt.y)
      max_y = image_pt.y;
  }
  cv::Mat points_mat(points);
  vector<int> hull;
  cv::convexHull(points_mat, hull);
  const int n = hull.size();

  cv::Point* poly = new cv::Point[n];
  for (int i=0; i<n; i++)
    poly[i] = points[hull[i]];
  cv::polylines(image->image, (const cv::Point**) &poly, &n, 1, true,
                CV_RGB(0, 255, 0), 2);

  ROS_ASSERT(min_x && min_y && max_x && max_y);
  /*cv::rectangle(image->image, cv::Point2d(*min_x, *min_y),
    cv::Point2d(*max_x, *max_y), CV_RGB(0, 255, 0), 2);*/

  delete [] poly;

  // Convert back to a ROS message and return
  return image->toImageMsg();
}

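// Shannon entropy (in bits) of a histogram of counts.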
double entropy(const vector<unsigned>& hist)
{
  double sum=0;
  const unsigned n = hist.size();
  BOOST_FOREACH (const unsigned c, hist)
    sum += c;
  double ent=0;
  for (unsigned i=0; i<n; i++)
  {
    const double p = (float)hist[i]/sum;
//    ROS_INFO ("Normalizing %u to %.2f", hist[i], p);
    if (hist[i]>0)
      ent -= p*log(p)/log(2);
  }
//  ROS_INFO ("Entropy is %.2f", ent);
  return ent;
}

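// Write the components of the unit-length version of normal n into *x, *y, *z.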
void normalize (const pcl16::Normal& n, double* x, double* y, double* z)
{
  *x = n.normal[0];
  *y = n.normal[1];
  *z = n.normal[2];
  const double r = sqrt(pow(*x, 2) + pow(*y, 2) + pow(*z, 2));
  *x = n.normal[0]/r;
  *y = n.normal[1]/r;
  *z = n.normal[2]/r;
}

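// Count how many histogram bins contain more than a threshold number of
// normals; this serves as a rough count of distinct normal directions.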
unsigned numNormalDirections (const vector<unsigned>& hist)
{
  unsigned n=0;
  const unsigned NORMAL_DIRECTION_THRESHOLD=50;
  BOOST_FOREACH (const unsigned i, hist)
    if (i>NORMAL_DIRECTION_THRESHOLD)
      n++;
  return n;
}

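// Build the MongoDB metadata for a blob: estimate surface normals on a
// subsampled set of its points, histogram their curvatures and (for
// non-vertical normals) their longitudes, and combine those statistics with
// the blob's color name, centroid, and diameter.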
mr::Metadata blobMetadata (const Blob& blob)
{
//  ROS_INFO ("Computing blob metadata");
  // Set up normal estimation class
  pcl16::NormalEstimation<Point, pcl16::Normal> ne;
  ne.setInputCloud(blob.cloud);

  // We only want normals at a subset of the points, for efficiency
  const unsigned MAX_NUMBER_POINTS = 1000;
  const unsigned num_points = blob.cloud->points.size();
  pcl16::PointIndices::Ptr indices(new pcl16::PointIndices());
  const unsigned step = ceil((double)num_points/MAX_NUMBER_POINTS);
//  ROS_INFO ("Num points is %u, using step %u", num_points, step);
  for (unsigned ind=0; ind<num_points; ind+=step)
    indices->indices.push_back(ind);
//  ROS_INFO ("Using %zu indices", indices->indices.size());
  ne.setIndices(indices);

  // Do the normal computation
  pcl16::search::KdTree<Point>::Ptr tree(new pcl16::search::KdTree<Point>);
  ne.setSearchMethod(tree);
  pcl16::PointCloud<pcl16::Normal>::Ptr normals(new pcl16::PointCloud<pcl16::Normal>());
  ne.setRadiusSearch(0.03);
  ne.compute(*normals);
//  ROS_INFO ("Computed %zu normals", normals->size());
  //const pcl16::Normal& n = normals->points[0];
//  ROS_INFO ("First normal is %.2f, %.2f %.2f, with curvature %.2f",
//      n.normal[0], n.normal[1], n.normal[2], n.curvature);

  // Geometry analysis: bin normals by curvature, count vertical normals, and
  // bin the non-vertical ones by longitude (angle in the horizontal plane)
  unsigned num_vertical = 0;
  const double VERTICALITY_THRESHOLD=0.9;
  vector<unsigned> c(10);
  const unsigned NUM_LONGITUDE_BINS=10;
  vector<unsigned> longitude_hist(NUM_LONGITUDE_BINS);
  BOOST_FOREACH (const pcl16::Normal& n, normals->points)
  {
    double x=42, y=42, z=42;
    normalize(n, &x, &y, &z);
    unsigned i = n.curvature < 0.5 ? floor(n.curvature*20) : 9;
    c.at(i)++;
    if (z>VERTICALITY_THRESHOLD)
      num_vertical++;
    else
    {
      const double PI=3.1415;
      const double longitude = atan2(y, x);
      const double normalized = min(0.999, max((longitude+PI)/(2*PI), 0.0));
      const unsigned ind=floor(normalized*NUM_LONGITUDE_BINS);
      longitude_hist.at(ind)++;
    }
  }

  // Compute other info and return
  const GeometricInfo geom_info = centroidAndDiameter(*blob.cloud);
  const double np=normals->points.size();
  return mr::Metadata().append("color", blobColorName(blob)).
    append("id", blob.id).
    append("diameter", geom_info.d).
    append("x", geom_info.x).
    append("y", geom_info.y).
    append("z", geom_info.z).
    append("num_normals", np).
    append("num_vertical", num_vertical).
    append("c01", (c[0]+c[1])/np).
    append("c0", c[0]/np).
    append("c23", (c[2]+c[3])/np).
    append("c2", c[2]/np).
    append("c4", (c[4]+c[5]+c[6]+c[7]+c[8]+c[9])/np).
    append("num_normal_directions", numNormalDirections(longitude_hist));
}

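// Metadata linking a stored view image to the id of the blob it shows and the
// 2D viewpoint it was taken from.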
mr::Metadata imageMetadata (const Blob& blob, const gm::Pose2D& viewpoint)
{
  return mr::Metadata("cluster_id", blob.id, "x",
                      viewpoint.x, "y", viewpoint.y,
                      "theta", viewpoint.theta);
}

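// Copy the blob's hull points into geometry_msgs points, using a sentinel z
// value (the hull is effectively a 2D projection onto the ground plane).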
vector<gm::Point> projectPoints (const Blob& blob)
{
  vector<gm::Point> points;
  BOOST_FOREACH (const Point& pt, blob.hull->points)
  {
    gm::Point p;
    p.x = pt.x;
    p.y = pt.y;
    p.z = -42.424242;
    points.push_back(p);
  }
  return points;
}

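// Pose (x, y, yaw) of the robot base in the canonical frame at the image's
// acquisition time.  Throws tf::TransformException if the transform is
// unavailable.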
gm::Pose2D BlobStore::getViewpoint (const sm::Image& img)
{
  const ros::Time& acquisition_time = img.header.stamp;
  const ros::Duration timeout(0.5);
  // No need to wait: it's old, so if we don't have it now we won't get it later
  // tf_.waitForTransform(canonical_frame_, base_frame_, acquisition_time, timeout);

  tf::StampedTransform tr;
  tf_.lookupTransform(canonical_frame_, base_frame_, acquisition_time, tr);

  gm::Pose2D pose;
  pose.x = tr.getOrigin().x();
  pose.y = tr.getOrigin().y();
  pose.theta = tf::getYaw(tr.getRotation());
  return pose;
}

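// Main update step.  For each observed blob, either insert a new database
// entry (cloud, hull, metadata, and a view image with the blob highlighted)
// or, if an entry with that id already exists and the cloud has changed,
// replace it; the stored view image is only replaced when the head was not
// moving significantly faster than when the existing image was taken.
// Finally, remove database entries whose ids no longer appear among the
// observed blobs.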
void BlobStore::update (const vector<Blob*>& blobs,
                        const sm::Image& img,
                        const sm::CameraInfo& img_info)
{
  Lock l(mutex_);
  ROS_ASSERT_MSG (blobs_ && images_, "Update was called on BlobStore, but"
                  " DB collections have not been initialized yet.");
  const double head_angular_speed = headAngularSpeed(img.header.stamp);

  typedef mr::MessageWithMetadata<BlobMessage>::ConstPtr BlobWithMetadata;
  std::set<int> seen_ids, existing_ids;
  ROS_DEBUG_STREAM_NAMED(
      "blob_store",
      "Received update at " << img.header.stamp << " with " <<
      blobs.size() << " blobs; head angular speed is " <<
      head_angular_speed);
  BOOST_FOREACH (Blob* blob, blobs)
  {
    ROS_DEBUG_STREAM_NAMED ("blob_store", "Processing blob " << blob->id);
    seen_ids.insert(blob->id);
    mr::Query q("id", blob->id);
    vector<BlobWithMetadata> matches = blobs_->pullAllResults(q);
    if (matches.empty())
    {
      mr::Metadata metadata = blobMetadata(*blob);
      metadata.append("head_speed", head_angular_speed);

      try
      {
        sm::Image::Ptr updated = highlightBlobInImage(*blob, img, img_info);
        gm::Pose2D viewpoint = getViewpoint(img);
        BlobMessage m;
        pcl16::toROSMsg(*blob->cloud, m.points);
        m.image = *updated;
        m.hull = projectPoints(*blob);
        blobs_->insert(m, metadata);
        pub_.publish(updated);
        images_->insert(*updated, imageMetadata(*blob, viewpoint));
      }
      catch (tf::TransformException& e)
      {
        ROS_WARN_STREAM ("Transform exception when storing blob: " << e.what());
      }
    }

    // If there is already a cluster with the given id, possibly update its
    // info
    else
    {
      ROS_ASSERT(matches.size()==1);
      PointCloud existing;
      const int id = matches[0]->lookupInt("id");

      // Get metadata for the existing image so we can delete it later
      typedef mr::MessageWithMetadata<sm::Image>::ConstPtr ImageWithMetadata;
      vector<ImageWithMetadata> image_matches =
        images_->pullAllResults(mr::Query("cluster_id", id));
      ROS_ASSERT(image_matches.size()==1);

      // Convert cloud to pcl16.  Note the pcl16 conversion dies on empty clouds
      if (matches[0]->points.data.empty())
      {
        ROS_DEBUG_NAMED ("blob_store", "Skipping empty cloud");
        continue;
      }
      pcl16::fromROSMsg(matches[0]->points, existing);

      // Quick check: if the number of points has not changed, we won't update
      // the data for this object.  This also ensures that the object is
      // visible in the image.
      const size_t num_points = blob->cloud->points.size();
      const size_t num_existing = existing.points.size();
      if (num_existing==num_points)
      {
        ROS_DEBUG_NAMED ("blob_store", "No new points for %d; skipping", id);
        continue;
      }

      // Try to update the metadata and store an image with the object
      // highlighted.  This can fail due to transform timeouts (between the
      // camera and map frames), in which case we just skip this object this
      // time.
      try
      {
        mr::Metadata metadata = blobMetadata(*blob);
        const double prev_speed = matches[0]->lookupDouble("head_speed");

        // These next two could theoretically throw tf exceptions
        const sm::Image::Ptr updated =
          highlightBlobInImage(*blob, img, img_info);
        const gm::Pose2D viewpoint = getViewpoint(img);

        BlobMessage m;
        pcl16::toROSMsg(*blob->cloud, m.points);
        m.image = *updated;
        m.hull = projectPoints(*blob);

        // If the head is moving fast relative to the base, we don't store the
        // image because it's likely to be blurred.
        if (prev_speed < head_angular_speed - max_head_angular_speed_/5)
        {
          ROS_DEBUG_STREAM_NAMED ("blob_store", "For " << id << ", speed " <<
                                  head_angular_speed << " too high relative to "
                                  << prev_speed << "; not updating image");
          metadata.append("head_speed", prev_speed);
        }
        else
        {
          images_->insert(*updated, imageMetadata(*blob, viewpoint));
          metadata.append("head_speed", head_angular_speed);

          const unsigned num_removed =
            images_->removeMessages(image_matches[0]->metadata);
          ROS_ASSERT(num_removed==1);
        }

        // Regardless of the head speed, we'll update the semantic metadata
        // by inserting the new cluster and removing the old one.  Note that
        // due to the lack of transactions in mongo, both messages coexist in
        // the db temporarily.  Queries by id should therefore always use only
        // the most recent message for a given id.
        blobs_->insert(m, metadata);
        const unsigned num_removed =
          blobs_->removeMessages(matches[0]->metadata);
        ROS_ASSERT(num_removed==1);
        ROS_DEBUG_STREAM_NAMED ("blob_store", "Removed message " <<
                                matches[0]->metadata);
      }
      catch (tf::TransformException& e)
      {
        ROS_WARN_STREAM("Transform exception when updating blob: " << e.what());
      }
    }
  }

  // Remove dangling ids
  BOOST_FOREACH (BlobWithMetadata b, blobs_->queryResults(mr::Query(), true))
    existing_ids.insert(b->lookupInt("id"));

  BOOST_FOREACH (const int id, existing_ids)
  {
    if (seen_ids.find(id)==seen_ids.end())
    {
      ROS_DEBUG_STREAM_NAMED ("blob_store", "Removing id " << id);
      blobs_->removeMessages(mr::Query("id", id));
      images_->removeMessages(mr::Query("cluster_id", id));
    }
  }
}

} // namespace semanticmodel