depth_layer.cpp
/*
 * Copyright (c) 2015-2016, Fetch Robotics Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the Fetch Robotics Inc. nor the names of its
 *       contributors may be used to endorse or promote products derived from
 *       this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL FETCH ROBOTICS INC. BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

// Author: Anuj Pasricha, Michael Ferguson

#include <pluginlib/class_list_macros.h>
#include <fetch_depth_layer/depth_layer.h>
#include <cmath>  // for std::isnan
#include <limits>

PLUGINLIB_EXPORT_CLASS(costmap_2d::FetchDepthLayer, costmap_2d::Layer)

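// The PLUGINLIB_EXPORT_CLASS call above registers FetchDepthLayer as a
// costmap_2d::Layer plugin, so a costmap can construct it by name at runtime.
// As an illustrative sketch only (the exact lookup name is declared in this
// package's plugin description XML), a costmap configuration would list it
// alongside the other layers, e.g.:
//
//   plugins:
//     - {name: depth, type: "costmap_2d::FetchDepthLayer"}
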
namespace costmap_2d
{

FetchDepthLayer::FetchDepthLayer()
{
}

void FetchDepthLayer::onInitialize()
{
  VoxelLayer::onInitialize();

  double observation_keep_time;
  double expected_update_rate;
  double transform_tolerance;
  double obstacle_range;
  double raytrace_range;
  double min_obstacle_height;
  double max_obstacle_height;
  double min_clearing_height;
  double max_clearing_height;
  std::string topic = "";
  std::string sensor_frame = "";
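  // topic and sensor_frame are left empty on purpose: the observation buffers
  // below are filled directly from this layer's depth image callback rather
  // than from a generic point cloud topic, and an empty sensor frame makes the
  // buffer use each cloud's own frame_id as the sensor origin.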

  ros::NodeHandle private_nh("~/" + name_);

  private_nh.param("publish_observations", publish_observations_, false);
  private_nh.param("observations_separation_threshold", observations_threshold_, 0.06);

  // Optionally detect the ground plane
  private_nh.param("find_ground_plane", find_ground_plane_, true);
  private_nh.param("ground_orientation_threshold", ground_threshold_, 0.9);

  // Should NANs be used as clearing observations?
  private_nh.param("clear_nans", clear_nans_, false);

  // Observation range values for both marking and clearing
  private_nh.param("min_obstacle_height", min_obstacle_height, 0.0);
  private_nh.param("max_obstacle_height", max_obstacle_height, 2.0);
  private_nh.param("min_clearing_height", min_clearing_height, -std::numeric_limits<double>::infinity());
  private_nh.param("max_clearing_height", max_clearing_height, std::numeric_limits<double>::infinity());

  // Skipping of potentially noisy rays near the edge of the image
  private_nh.param("skip_rays_bottom", skip_rays_bottom_, 20);
  private_nh.param("skip_rays_top",    skip_rays_top_,    20);
  private_nh.param("skip_rays_left",   skip_rays_left_,   20);
  private_nh.param("skip_rays_right",  skip_rays_right_,  20);

  // Should skipped edge rays be used for clearing?
  private_nh.param("clear_with_skipped_rays", clear_with_skipped_rays_, false);

  // How long should observations persist?
  private_nh.param("observation_persistence", observation_keep_time, 0.0);

  // How often should we expect to get sensor updates?
  private_nh.param("expected_update_rate", expected_update_rate, 0.0);

  // How long to wait for transforms to be available?
  private_nh.param("transform_tolerance", transform_tolerance, 0.5);

  std::string raytrace_range_param_name, obstacle_range_param_name;

  // get the obstacle range for the sensor
  obstacle_range = 2.5;
  if (private_nh.searchParam("obstacle_range", obstacle_range_param_name))
  {
    private_nh.getParam(obstacle_range_param_name, obstacle_range);
  }

  // get the raytrace range for the sensor
  raytrace_range = 3.0;
  if (private_nh.searchParam("raytrace_range", raytrace_range_param_name))
  {
    private_nh.getParam(raytrace_range_param_name, raytrace_range);
  }

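  // Two observation buffers are fed from the same depth data: the marking
  // buffer keeps points between min/max_obstacle_height and is used to mark
  // obstacles, while the clearing buffer keeps points between
  // min/max_clearing_height and is used to raytrace free space.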
  marking_buf_ = boost::shared_ptr<costmap_2d::ObservationBuffer>(
        new costmap_2d::ObservationBuffer(topic, observation_keep_time,
          expected_update_rate, min_obstacle_height, max_obstacle_height,
          obstacle_range, raytrace_range, *tf_, global_frame_,
          sensor_frame, transform_tolerance));
  marking_buffers_.push_back(marking_buf_);
  observation_buffers_.push_back(marking_buf_);

  clearing_buf_ = boost::shared_ptr<costmap_2d::ObservationBuffer>(
        new costmap_2d::ObservationBuffer(topic, observation_keep_time,
          expected_update_rate, min_clearing_height, max_clearing_height,
          obstacle_range, raytrace_range, *tf_, global_frame_,
          sensor_frame, transform_tolerance));
  clearing_buffers_.push_back(clearing_buf_);
  observation_buffers_.push_back(clearing_buf_);

  if (publish_observations_)
  {
    clearing_pub_ = private_nh.advertise<sensor_msgs::PointCloud>("clearing_obs", 1);
    marking_pub_ = private_nh.advertise<sensor_msgs::PointCloud>("marking_obs", 1);
  }

  // Subscribe to the depth image and camera info topics
  std::string camera_depth_topic, camera_info_topic;
  private_nh.param("depth_topic", camera_depth_topic,
                   std::string("/head_camera/depth_downsample/image_raw"));
  private_nh.param("info_topic", camera_info_topic,
                   std::string("/head_camera/depth_downsample/camera_info"));
  camera_info_sub_ = private_nh.subscribe<sensor_msgs::CameraInfo>(
    camera_info_topic, 10, &FetchDepthLayer::cameraInfoCallback, this);

  depth_image_sub_.reset(new message_filters::Subscriber<sensor_msgs::Image>(private_nh, camera_depth_topic, 10));
  depth_image_filter_ = boost::shared_ptr< tf::MessageFilter<sensor_msgs::Image> >(
    new tf::MessageFilter<sensor_msgs::Image>(*depth_image_sub_, *tf_, global_frame_, 10));
  depth_image_filter_->registerCallback(boost::bind(&FetchDepthLayer::depthImageCallback, this, _1));
  observation_subscribers_.push_back(depth_image_sub_);
  observation_notifiers_.push_back(depth_image_filter_);
  observation_notifiers_.back()->setTolerance(ros::Duration(0.05));
}

FetchDepthLayer::~FetchDepthLayer()
{
}

void FetchDepthLayer::cameraInfoCallback(
  const sensor_msgs::CameraInfo::ConstPtr& msg)
{
  // Lock mutex before updating K
  boost::unique_lock<boost::mutex> lock(mutex_K_);

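  // CameraInfo::P is the row-major 3x4 projection matrix: P[0] = fx,
  // P[2] = cx, P[5] = fy, P[6] = cy. Only P[0] is read for the focal length,
  // which implicitly assumes square pixels (fx == fy).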
  float focal_pixels = msg->P[0];
  float center_x = msg->P[2];
  float center_y = msg->P[6];

  if (msg->binning_x == msg->binning_y)
  {
    if (msg->binning_x > 0)
    {
      K_ = (cv::Mat_<double>(3, 3) <<
        focal_pixels/msg->binning_x, 0.0, center_x/msg->binning_x,
        0.0, focal_pixels/msg->binning_x, center_y/msg->binning_x,
        0.0, 0.0, 1.0);
    }
    else
    {
      K_ = (cv::Mat_<double>(3, 3) <<
        focal_pixels, 0.0, center_x,
        0.0, focal_pixels, center_y,
        0.0, 0.0, 1.0);
    }
  }
  else
  {
    ROS_ERROR("binning_x is not equal to binning_y");
  }
}

void FetchDepthLayer::depthImageCallback(
  const sensor_msgs::Image::ConstPtr& msg)
{
  // Lock mutex before using K
  boost::unique_lock<boost::mutex> lock(mutex_K_);

  if (K_.empty())
  {
    ROS_DEBUG_NAMED("depth_layer", "Camera info not yet received.");
    return;
  }

  cv_bridge::CvImagePtr cv_ptr;
  try
  {
    cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::TYPE_32FC1);
  }
  catch (cv_bridge::Exception& e)
  {
    ROS_ERROR("cv_bridge exception: %s", e.what());
    return;
  }

  // Optionally replace NaN readings with a far-away depth (25 m) so that they
  // produce clearing rays instead of being dropped
  if (clear_nans_)
  {
    for (int i = 0; i < cv_ptr->image.rows * cv_ptr->image.cols; i++)
    {
      if (std::isnan(cv_ptr->image.at<float>(i)))
        cv_ptr->image.at<float>(i) = 25.0;
    }
  }

  // Convert to 3d
  cv::Mat points3d;
  depthTo3d(cv_ptr->image, K_, points3d);
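  // points3d has the same size as the depth image; each element is an
  // (x, y, z) point in meters in the camera optical frame
  // (x right, y down, z forward).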

  // Determine ground plane, either through camera or TF
  cv::Vec4f ground_plane;
  if (find_ground_plane_)
  {
    // Get normals
    if (normals_estimator_.empty())
    {
      normals_estimator_ = new RgbdNormals(cv_ptr->image.rows,
                                           cv_ptr->image.cols,
                                           cv_ptr->image.depth(),
                                           K_);
    }
    cv::Mat normals;
    (*normals_estimator_)(points3d, normals);

    // Find plane(s)
    if (plane_estimator_.empty())
    {
#if CV_MAJOR_VERSION == 3
      plane_estimator_.reset(new RgbdPlane());
      // Model parameters are based on notes in opencv_candidate
      plane_estimator_->setSensorErrorA(0.0075);
      plane_estimator_->setSensorErrorB(0.0);
      plane_estimator_->setSensorErrorC(0.0);
      // Image/cloud height/width must be multiple of block size
      plane_estimator_->setBlockSize(40);
      // Distance a point can be from plane and still be part of it
      plane_estimator_->setThreshold(observations_threshold_);
      // Minimum cluster size to be a plane
      plane_estimator_->setMinSize(1000);
#else
      plane_estimator_ = cv::Algorithm::create<RgbdPlane>("RGBD.RgbdPlane");
      // Model parameters are based on notes in opencv_candidate
      plane_estimator_->set("sensor_error_a", 0.0075);
      plane_estimator_->set("sensor_error_b", 0.0);
      plane_estimator_->set("sensor_error_c", 0.0);
      // Image/cloud height/width must be multiple of block size
      plane_estimator_->set("block_size", 40);
      // Distance a point can be from plane and still be part of it
      plane_estimator_->set("threshold", observations_threshold_);
      // Minimum cluster size to be a plane
      plane_estimator_->set("min_size", 1000);
#endif
    }
    cv::Mat planes_mask;
    std::vector<cv::Vec4f> plane_coefficients;
    (*plane_estimator_)(points3d, normals, planes_mask, plane_coefficients);

    for (size_t i = 0; i < plane_coefficients.size(); i++)
    {
      // Accept a plane as ground if its normal is close to (0, -1, 0) in the
      // camera optical frame, i.e. roughly pointing up (optical +Y points down)
      if ((fabs(0.0 - plane_coefficients[i][0]) <= ground_threshold_) &&
          (fabs(1.0 + plane_coefficients[i][1]) <= ground_threshold_) &&
          (fabs(0.0 - plane_coefficients[i][2]) <= ground_threshold_))
      {
        ground_plane = plane_coefficients[i];
        break;
      }
    }
  }
  else
  {
    // Find the ground plane in camera coordinates using TF:
    // express the base_link z axis (the ground normal) in the camera frame
    tf::Stamped<tf::Vector3> vector(tf::Vector3(0, 0, 1), ros::Time(0), "base_link");
    tf_->transformVector(msg->header.frame_id, vector, vector);
    ground_plane[0] = vector.getX();
    ground_plane[1] = vector.getY();
    ground_plane[2] = vector.getZ();

    // The plane offset is the height of the camera origin above base_link
    tf::StampedTransform transform;
    tf_->lookupTransform("base_link", msg->header.frame_id, ros::Time(0), transform);
    ground_plane[3] = transform.getOrigin().getZ();
  }
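  // Whichever branch produced it, the plane is stored as coefficients
  // (a, b, c, d) of a*x + b*y + c*z + d = 0 in the camera frame; the marking
  // loop below treats fabs(a*x + b*y + c*z + d) as a point's distance from the
  // ground and compares it against observations_threshold_.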

  // Check that a ground plane was actually found; otherwise drop this frame so
  // that points on the floor are not marked as obstacles
  if (ground_plane[0] == 0.0 && ground_plane[1] == 0.0 &&
      ground_plane[2] == 0.0 && ground_plane[3] == 0.0)
  {
    ROS_DEBUG_NAMED("depth_layer", "Invalid ground plane.");
    return;
  }

  cv::Mat channels[3];
  cv::split(points3d, channels);

  sensor_msgs::PointCloud clearing_points;
  clearing_points.header.stamp = msg->header.stamp;
  clearing_points.header.frame_id = msg->header.frame_id;

  sensor_msgs::PointCloud marking_points;
  marking_points.header.stamp = msg->header.stamp;
  marking_points.header.frame_id = msg->header.frame_id;

  // Put points in clearing/marking clouds
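  // For each pixel: (1) discard invalid (zero or NaN) points, (2) add valid
  // points to the clearing cloud either before or after the border check,
  // depending on clear_with_skipped_rays, (3) skip the noisy image border for
  // marking, (4) drop points lying on the ground plane, and (5) mark only
  // points whose neighborhood agrees with them, to reject speckle noise.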
  for (int i = 0; i < points3d.rows; i++)
  {
    for (int j = 0; j < points3d.cols; j++)
    {
      // Get next point
      geometry_msgs::Point32 current_point;
      current_point.x = channels[0].at<float>(i, j);
      current_point.y = channels[1].at<float>(i, j);
      current_point.z = channels[2].at<float>(i, j);
      // Check point validity
      if (current_point.x != 0.0 &&
          current_point.y != 0.0 &&
          current_point.z != 0.0 &&
          !std::isnan(current_point.x) &&
          !std::isnan(current_point.y) &&
          !std::isnan(current_point.z))
      {
        if (clear_with_skipped_rays_)
        {
          // If edge rays are to be used for clearing, go ahead and add them now.
          clearing_points.points.push_back(current_point);
        }

        // Do not consider boundary points for obstacle marking since they are very noisy.
        if (i < skip_rays_top_ ||
            i >= points3d.rows - skip_rays_bottom_ ||
            j < skip_rays_left_ ||
            j >= points3d.cols - skip_rays_right_)
        {
          continue;
        }

        if (!clear_with_skipped_rays_)
        {
          // If edge rays are not to be used for clearing, only add them after the edge check.
          clearing_points.points.push_back(current_point);
        }

        // Check if point is part of the ground plane
        if (fabs(ground_plane[0] * current_point.x +
                 ground_plane[1] * current_point.y +
                 ground_plane[2] * current_point.z +
                 ground_plane[3]) <= observations_threshold_)
        {
          continue;  // Do not mark points near the floor.
        }

        // Check for outliers: count how many of the 8 neighbours lie within
        // 10 cm of this point on every axis, and only mark well-supported points.
        int num_valid = 0;
        for (int x = -1; x < 2; x++)
        {
          for (int y = -1; y < 2; y++)
          {
            if (x == 0 && y == 0)
            {
              continue;
            }
            // Skip neighbours outside the image (possible if skip_rays_* are zero)
            if (i + x < 0 || i + x >= points3d.rows ||
                j + y < 0 || j + y >= points3d.cols)
            {
              continue;
            }
            float px = channels[0].at<float>(i+x, j+y);
            float py = channels[1].at<float>(i+x, j+y);
            float pz = channels[2].at<float>(i+x, j+y);
            if (px != 0.0 && py != 0.0 && pz != 0.0 &&
                !std::isnan(px) && !std::isnan(py) && !std::isnan(pz))
            {
              if (fabs(px - current_point.x) < 0.1 &&
                  fabs(py - current_point.y) < 0.1 &&
                  fabs(pz - current_point.z) < 0.1)
              {
                num_valid++;
              }
            }
          }  // for y
        }  // for x

        if (num_valid >= 7)
        {
          marking_points.points.push_back(current_point);
        }
      }  // if point is valid
    }  // for j (columns)
  }  // for i (rows)

  // Publish and buffer our clearing point cloud
  if (publish_observations_)
  {
    clearing_pub_.publish(clearing_points);
  }

  sensor_msgs::PointCloud2 clearing_cloud2;
  if (!sensor_msgs::convertPointCloudToPointCloud2(clearing_points, clearing_cloud2))
  {
    ROS_ERROR("Failed to convert a PointCloud to a PointCloud2, dropping message");
    return;
  }

  // Buffer the clearing observations
  clearing_buf_->lock();
  clearing_buf_->bufferCloud(clearing_cloud2);
  clearing_buf_->unlock();

  // Publish and buffer our marking point cloud
  if (publish_observations_)
  {
    marking_pub_.publish(marking_points);
  }

  sensor_msgs::PointCloud2 marking_cloud2;
  if (!sensor_msgs::convertPointCloudToPointCloud2(marking_points, marking_cloud2))
  {
    ROS_ERROR("Failed to convert a PointCloud to a PointCloud2, dropping message");
    return;
  }

  marking_buf_->lock();
  marking_buf_->bufferCloud(marking_cloud2);
  marking_buf_->unlock();
}

}  // namespace costmap_2d

