cvfh.hpp
/*
 * Software License Agreement (BSD License)
 *
 *  Point Cloud Library (PCL) - www.pointclouds.org
 *  Copyright (c) 2010-2011, Willow Garage, Inc.
 *  Copyright (c) 2012-, Open Perception, Inc.
 *
 *  All rights reserved.
 *
 *  Redistribution and use in source and binary forms, with or without
 *  modification, are permitted provided that the following conditions
 *  are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of the copyright holder(s) nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 *  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 *  FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 *  COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 *  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 *  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 *  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 *  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 *  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 *  ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 *  POSSIBILITY OF SUCH DAMAGE.
 *
 * $Id: cvfh.hpp 6144 2012-07-04 22:06:28Z rusu $
 *
 */

#ifndef PCL_FEATURES_IMPL_CVFH_H_
#define PCL_FEATURES_IMPL_CVFH_H_

#include <pcl/features/cvfh.h>
#include <pcl/features/pfh.h>

template<typename PointInT, typename PointNT, typename PointOutT> void
pcl::CVFHEstimation<PointInT, PointNT, PointOutT>::compute (PointCloudOut &output)
{
  if (!Feature<PointInT, PointOutT>::initCompute ())
  {
    output.width = output.height = 0;
    output.points.clear ();
    return;
  }
  // Resize the output dataset
  // Important! Allocate only as many elements as we actually need; otherwise
  // we risk pre-allocating too much memory, which can lead to bad_alloc
  // (see http://dev.pointclouds.org/issues/657)
  output.width = output.height = 1;
  output.points.resize (1);

  // Perform the actual feature computation
  computeFeature (output);

  Feature<PointInT, PointOutT>::deinitCompute ();
}

template<typename PointInT, typename PointNT, typename PointOutT> void
pcl::CVFHEstimation<PointInT, PointNT, PointOutT>::extractEuclideanClustersSmooth (
    const pcl::PointCloud<pcl::PointNormal> &cloud,
    const pcl::PointCloud<pcl::PointNormal> &normals,
    float tolerance,
    const pcl::search::Search<pcl::PointNormal>::Ptr &tree,
    std::vector<pcl::PointIndices> &clusters,
    double eps_angle,
    unsigned int min_pts_per_cluster,
    unsigned int max_pts_per_cluster)
{
  if (tree->getInputCloud ()->points.size () != cloud.points.size ())
  {
    PCL_ERROR ("[pcl::extractEuclideanClusters] Tree built for a different point cloud dataset (%zu) than the input cloud (%zu)!\n", tree->getInputCloud ()->points.size (), cloud.points.size ());
    return;
  }
  if (cloud.points.size () != normals.points.size ())
  {
    PCL_ERROR ("[pcl::extractEuclideanClusters] Number of points in the input point cloud (%zu) different than normals (%zu)!\n", cloud.points.size (), normals.points.size ());
    return;
  }

  // Create a bool vector of processed point indices, and initialize it to false
  std::vector<bool> processed (cloud.points.size (), false);

  std::vector<int> nn_indices;
  std::vector<float> nn_distances;
  // Process all points in the indices vector
  for (int i = 0; i < static_cast<int> (cloud.points.size ()); ++i)
  {
    if (processed[i])
      continue;

    std::vector<unsigned int> seed_queue;
    int sq_idx = 0;
    seed_queue.push_back (i);

    processed[i] = true;

    while (sq_idx < static_cast<int> (seed_queue.size ()))
    {
      // Search for sq_idx
      if (!tree->radiusSearch (seed_queue[sq_idx], tolerance, nn_indices, nn_distances))
      {
        sq_idx++;
        continue;
      }

      for (size_t j = 1; j < nn_indices.size (); ++j) // nn_indices[0] is the query point (seed_queue[sq_idx]) itself, so skip it
      {
        if (processed[nn_indices[j]]) // Has this point been processed before?
          continue;

        //processed[nn_indices[j]] = true;
        // [-1;1]

        double dot_p = normals.points[seed_queue[sq_idx]].normal[0] * normals.points[nn_indices[j]].normal[0]
                     + normals.points[seed_queue[sq_idx]].normal[1] * normals.points[nn_indices[j]].normal[1]
                     + normals.points[seed_queue[sq_idx]].normal[2] * normals.points[nn_indices[j]].normal[2];

        // acos needs its argument in [-1, 1]; clamp to guard against floating point drift
        if (dot_p > 1.0)
          dot_p = 1.0;
        else if (dot_p < -1.0)
          dot_p = -1.0;
        if (acos (dot_p) < eps_angle)
        {
          processed[nn_indices[j]] = true;
          seed_queue.push_back (nn_indices[j]);
        }
      }

      sq_idx++;
    }

    // If this queue is satisfactory, add to the clusters
    if (seed_queue.size () >= min_pts_per_cluster && seed_queue.size () <= max_pts_per_cluster)
    {
      pcl::PointIndices r;
      r.indices.resize (seed_queue.size ());
      for (size_t j = 0; j < seed_queue.size (); ++j)
        r.indices[j] = seed_queue[j];

      std::sort (r.indices.begin (), r.indices.end ());
      r.indices.erase (std::unique (r.indices.begin (), r.indices.end ()), r.indices.end ());

      r.header = cloud.header;
      clusters.push_back (r); // We could avoid a copy by working directly in the vector
    }
  }
}
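// Note on the region-growing criterion above: a neighbor joins the current cluster
// only if it lies within `tolerance` of a seed point (a radius in the cloud's own
// units, typically meters) AND the angle between their normals is below `eps_angle`
// (radians). For example, an eps_angle of 0.13 rad corresponds to roughly 7.5 degrees
// of allowed normal deviation, so clusters grow only across smoothly varying patches.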

template<typename PointInT, typename PointNT, typename PointOutT> void
pcl::CVFHEstimation<PointInT, PointNT, PointOutT>::filterNormalsWithHighCurvature (
    const pcl::PointCloud<PointNT> &cloud,
    std::vector<int> &indices_to_use,
    std::vector<int> &indices_out,
    std::vector<int> &indices_in,
    float threshold)
{
  indices_out.resize (cloud.points.size ());
  indices_in.resize (cloud.points.size ());

  size_t in, out;
  in = out = 0;

  for (int i = 0; i < static_cast<int> (indices_to_use.size ()); i++)
  {
    if (cloud.points[indices_to_use[i]].curvature > threshold)
    {
      indices_out[out] = indices_to_use[i];
      out++;
    }
    else
    {
      indices_in[in] = indices_to_use[i];
      in++;
    }
  }

  indices_out.resize (out);
  indices_in.resize (in);
}
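// Note on the threshold used above: the `curvature` field compared against `threshold`
// is whatever the caller's normal-estimation step stored. With pcl::NormalEstimation it
// is the local surface variation (smallest eigenvalue of the neighborhood covariance
// divided by the sum of the eigenvalues), a dimensionless value in [0, 1/3], so small
// thresholds on the order of 0.03 keep nearly planar points and reject edges and corners.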

template<typename PointInT, typename PointNT, typename PointOutT> void
pcl::CVFHEstimation<PointInT, PointNT, PointOutT>::computeFeature (PointCloudOut &output)
{
  // Check if input was set
  if (!normals_)
  {
    PCL_ERROR ("[pcl::%s::computeFeature] No input dataset containing normals was given!\n", getClassName ().c_str ());
    output.width = output.height = 0;
    output.points.clear ();
    return;
  }
  if (normals_->points.size () != surface_->points.size ())
  {
    PCL_ERROR ("[pcl::%s::computeFeature] The number of points in the input dataset differs from the number of points in the dataset containing the normals!\n", getClassName ().c_str ());
    output.width = output.height = 0;
    output.points.clear ();
    return;
  }

  centroids_dominant_orientations_.clear ();

  // ---[ Step 0: remove normals with high curvature
  std::vector<int> indices_out;
  std::vector<int> indices_in;
  filterNormalsWithHighCurvature (*normals_, *indices_, indices_out, indices_in, curv_threshold_);

  pcl::PointCloud<pcl::PointNormal>::Ptr normals_filtered_cloud (new pcl::PointCloud<pcl::PointNormal> ());
  normals_filtered_cloud->width = static_cast<uint32_t> (indices_in.size ());
  normals_filtered_cloud->height = 1;
  normals_filtered_cloud->points.resize (normals_filtered_cloud->width);

  for (size_t i = 0; i < indices_in.size (); ++i)
  {
    normals_filtered_cloud->points[i].x = surface_->points[indices_in[i]].x;
    normals_filtered_cloud->points[i].y = surface_->points[indices_in[i]].y;
    normals_filtered_cloud->points[i].z = surface_->points[indices_in[i]].z;
  }

  std::vector<pcl::PointIndices> clusters;

  if (normals_filtered_cloud->points.size () >= min_points_)
  {
    // Recompute normals and use them for clustering
    KdTreePtr normals_tree_filtered (new pcl::search::KdTree<pcl::PointNormal> (false));
    normals_tree_filtered->setInputCloud (normals_filtered_cloud);

    NormalEstimator n3d;
    n3d.setRadiusSearch (radius_normals_);
    n3d.setSearchMethod (normals_tree_filtered);
    n3d.setInputCloud (normals_filtered_cloud);
    n3d.compute (*normals_filtered_cloud);

    KdTreePtr normals_tree (new pcl::search::KdTree<pcl::PointNormal> (false));
    normals_tree->setInputCloud (normals_filtered_cloud);

    extractEuclideanClustersSmooth (*normals_filtered_cloud,
                                    *normals_filtered_cloud,
                                    cluster_tolerance_,
                                    normals_tree,
                                    clusters,
                                    eps_angle_threshold_,
                                    static_cast<unsigned int> (min_points_));
  }

  VFHEstimator vfh;
  vfh.setInputCloud (surface_);
  vfh.setInputNormals (normals_);
  vfh.setIndices (indices_);
  vfh.setSearchMethod (this->tree_);
  vfh.setUseGivenNormal (true);
  vfh.setUseGivenCentroid (true);
  vfh.setNormalizeBins (normalize_bins_);
  vfh.setNormalizeDistance (true);
  vfh.setFillSizeComponent (true);
  output.height = 1;

  // ---[ Step 1b: check if any dominant cluster was found
  if (clusters.size () > 0)
  { // ---[ Step 1b.1: if yes, compute CVFH using the cluster information

    for (size_t i = 0; i < clusters.size (); ++i) // for each cluster
    {
      Eigen::Vector4f avg_normal = Eigen::Vector4f::Zero ();
      Eigen::Vector4f avg_centroid = Eigen::Vector4f::Zero ();

      for (size_t j = 0; j < clusters[i].indices.size (); j++)
      {
        avg_normal += normals_filtered_cloud->points[clusters[i].indices[j]].getNormalVector4fMap ();
        avg_centroid += normals_filtered_cloud->points[clusters[i].indices[j]].getVector4fMap ();
      }

      avg_normal /= static_cast<float> (clusters[i].indices.size ());
      avg_centroid /= static_cast<float> (clusters[i].indices.size ());

      Eigen::Vector4f centroid_test;
      pcl::compute3DCentroid (*normals_filtered_cloud, centroid_test);
      avg_normal.normalize ();

      Eigen::Vector3f avg_norm (avg_normal[0], avg_normal[1], avg_normal[2]);
      Eigen::Vector3f avg_dominant_centroid (avg_centroid[0], avg_centroid[1], avg_centroid[2]);

      // Append the normal and centroid for this cluster
      dominant_normals_.push_back (avg_norm);
      centroids_dominant_orientations_.push_back (avg_dominant_centroid);
    }

    // Compute the modified VFH for all dominant clusters and add them to the list
    output.points.resize (dominant_normals_.size ());
    output.width = static_cast<uint32_t> (dominant_normals_.size ());

    for (size_t i = 0; i < dominant_normals_.size (); ++i)
    {
      // Configure the VFH computation for CVFH
      vfh.setNormalToUse (dominant_normals_[i]);
      vfh.setCentroidToUse (centroids_dominant_orientations_[i]);
      pcl::PointCloud<pcl::VFHSignature308> vfh_signature;
      vfh.compute (vfh_signature);
      output.points[i] = vfh_signature.points[0];
    }
  }
  else
  { // ---[ Step 1b.2: if no, compute CVFH using all the object points
    Eigen::Vector4f avg_centroid;
    pcl::compute3DCentroid (*surface_, avg_centroid);
    Eigen::Vector3f cloud_centroid (avg_centroid[0], avg_centroid[1], avg_centroid[2]);
    centroids_dominant_orientations_.push_back (cloud_centroid);

    // Configure the VFH computation for CVFH using all object points
    vfh.setCentroidToUse (cloud_centroid);
    vfh.setUseGivenNormal (false);

    pcl::PointCloud<pcl::VFHSignature308> vfh_signature;
    vfh.compute (vfh_signature);

    output.points.resize (1);
    output.width = 1;

    output.points[0] = vfh_signature.points[0];
  }
}
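/* Minimal usage sketch (illustrative only; `cloud`, `normals`, and `tree` are
 * placeholders for data prepared by the caller):
 *
 *   pcl::CVFHEstimation<pcl::PointXYZ, pcl::Normal, pcl::VFHSignature308> cvfh;
 *   cvfh.setInputCloud (cloud);        // pcl::PointCloud<pcl::PointXYZ>::ConstPtr
 *   cvfh.setInputNormals (normals);    // pcl::PointCloud<pcl::Normal>::ConstPtr
 *   cvfh.setSearchMethod (tree);       // e.g. pcl::search::KdTree<pcl::PointXYZ>::Ptr
 *   cvfh.setEPSAngleThreshold (0.13f); // radians; smooth-region clustering tolerance
 *   cvfh.setCurvatureThreshold (0.03f);
 *   pcl::PointCloud<pcl::VFHSignature308> descriptors;
 *   cvfh.compute (descriptors);        // one VFHSignature308 per stable region found
 */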

#define PCL_INSTANTIATE_CVFHEstimation(T,NT,OutT) template class PCL_EXPORTS pcl::CVFHEstimation<T,NT,OutT>;

#endif    // PCL_FEATURES_IMPL_CVFH_H_

