pfh.hpp
/*
 * Software License Agreement (BSD License)
 *
 *  Point Cloud Library (PCL) - www.pointclouds.org
 *  Copyright (c) 2010-2011, Willow Garage, Inc.
 *  Copyright (c) 2012-, Open Perception, Inc.
 *
 *  All rights reserved.
 *
 *  Redistribution and use in source and binary forms, with or without
 *  modification, are permitted provided that the following conditions
 *  are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of the copyright holder(s) nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 *  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 *  FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 *  COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 *  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 *  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 *  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 *  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 *  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 *  ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 *  POSSIBILITY OF SUCH DAMAGE.
 *
 */

#ifndef PCL_FEATURES_IMPL_PFH_H_
#define PCL_FEATURES_IMPL_PFH_H_

#include <pcl/features/pfh.h>
template <typename PointInT, typename PointNT, typename PointOutT> bool
pcl::PFHEstimation<PointInT, PointNT, PointOutT>::computePairFeatures (
      const pcl::PointCloud<PointInT> &cloud, const pcl::PointCloud<PointNT> &normals,
      int p_idx, int q_idx, float &f1, float &f2, float &f3, float &f4)
{
  pcl::computePairFeatures (cloud.points[p_idx].getVector4fMap (), normals.points[p_idx].getNormalVector4fMap (),
                            cloud.points[q_idx].getVector4fMap (), normals.points[q_idx].getNormalVector4fMap (),
                            f1, f2, f3, f4);
  return (true);
}
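This wrapper delegates to the free function pcl::computePairFeatures, which fills the four PFH pair features: judging from the binning code further below, f1 is an angle in [-pi, pi], f2 and f3 are cosine-valued features in [-1, 1], and f4 is the Euclidean distance between the two points. The following minimal, standalone sketch (with made-up point and normal values) shows how the free function can be called directly:

// Sketch only: the coordinates and normals below are arbitrary placeholders.
#include <pcl/features/pfh.h>
#include <Eigen/Core>
#include <iostream>

int main ()
{
  Eigen::Vector4f p1 (0.0f, 0.0f, 0.0f, 0.0f), n1 (0.0f, 0.0f, 1.0f, 0.0f);
  Eigen::Vector4f p2 (0.1f, 0.0f, 0.0f, 0.0f), n2 (0.0f, 1.0f, 0.0f, 0.0f);

  float f1, f2, f3, f4;
  // Computes the pair features between (p1, n1) and (p2, n2).
  if (pcl::computePairFeatures (p1, n1, p2, n2, f1, f2, f3, f4))
    std::cout << "f1=" << f1 << " f2=" << f2
              << " f3=" << f3 << " f4=" << f4 << std::endl;
  return (0);
}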

template <typename PointInT, typename PointNT, typename PointOutT> void
pcl::PFHEstimation<PointInT, PointNT, PointOutT>::computePointPFHSignature (
      const pcl::PointCloud<PointInT> &cloud, const pcl::PointCloud<PointNT> &normals,
      const std::vector<int> &indices, int nr_split, Eigen::VectorXf &pfh_histogram)
{
  int h_index, h_p;

  // Clear the resultant point histogram
  pfh_histogram.setZero ();

  // Factorization constant
  float hist_incr = 100.0f / static_cast<float> (indices.size () * (indices.size () - 1) / 2);

  std::pair<int, int> key;
  // Iterate over all the points in the neighborhood
  for (size_t i_idx = 0; i_idx < indices.size (); ++i_idx)
  {
    for (size_t j_idx = 0; j_idx < i_idx; ++j_idx)
    {
      // If the 3D points are invalid, don't bother estimating, just continue
      if (!isFinite (cloud.points[indices[i_idx]]) || !isFinite (cloud.points[indices[j_idx]]))
        continue;

      if (use_cache_)
      {
        // Create the key for the feature cache from the two point indices.
        // Note that the pair is stored as (indices[i_idx], indices[j_idx]);
        // the indices are not reordered, so (p, q) and (q, p) are cached separately.
        key = std::pair<int, int> (indices[i_idx], indices[j_idx]);

        // Check to see if we already estimated this pair in the feature cache
        std::map<std::pair<int, int>, Eigen::Vector4f, std::less<std::pair<int, int> >, Eigen::aligned_allocator<Eigen::Vector4f> >::iterator fm_it = feature_map_.find (key);
        if (fm_it != feature_map_.end ())
          pfh_tuple_ = fm_it->second;
        else
        {
          // Compute the pair NNi to NNj
          if (!computePairFeatures (cloud, normals, indices[i_idx], indices[j_idx],
                                    pfh_tuple_[0], pfh_tuple_[1], pfh_tuple_[2], pfh_tuple_[3]))
            continue;
        }
      }
      else
        if (!computePairFeatures (cloud, normals, indices[i_idx], indices[j_idx],
                                  pfh_tuple_[0], pfh_tuple_[1], pfh_tuple_[2], pfh_tuple_[3]))
          continue;

      // Normalize the f1, f2, f3 features and push them in the histogram
      f_index_[0] = static_cast<int> (floor (nr_split * ((pfh_tuple_[0] + M_PI) * d_pi_)));
      if (f_index_[0] < 0)         f_index_[0] = 0;
      if (f_index_[0] >= nr_split) f_index_[0] = nr_split - 1;

      f_index_[1] = static_cast<int> (floor (nr_split * ((pfh_tuple_[1] + 1.0) * 0.5)));
      if (f_index_[1] < 0)         f_index_[1] = 0;
      if (f_index_[1] >= nr_split) f_index_[1] = nr_split - 1;

      f_index_[2] = static_cast<int> (floor (nr_split * ((pfh_tuple_[2] + 1.0) * 0.5)));
      if (f_index_[2] < 0)         f_index_[2] = 0;
      if (f_index_[2] >= nr_split) f_index_[2] = nr_split - 1;

      // Copy into the histogram
      h_index = 0;
      h_p     = 1;
      for (int d = 0; d < 3; ++d)
      {
        h_index += h_p * f_index_[d];
        h_p     *= nr_split;
      }
      pfh_histogram[h_index] += hist_incr;

      if (use_cache_)
      {
        // Save the value in the feature cache
        feature_map_[key] = pfh_tuple_;

        // Use a maximum cache so that we don't go overboard on RAM usage
        key_list_.push (key);
        // Check to see if we need to remove an element due to exceeding max_size
        if (key_list_.size () > max_cache_size_)
        {
          // Remove the oldest entry so the cache does not grow past max_cache_size_
          feature_map_.erase (key_list_.front ());
          key_list_.pop ();
        }
      }
    }
  }
}
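As the loop above shows, the flat histogram index is a base-nr_split encoding of the three per-feature bin indices, so pfh_histogram needs nr_split^3 elements (125 for the default nr_split of 5 used with pcl::PFHSignature125). The small standalone sketch below restates that indexing; the function name is illustrative only and not part of PCL:

// Illustration only: flatten three per-feature bin indices (each in
// [0, nr_split)) into a single histogram index, as the loop above does.
inline int
flattenPFHBins (int f1_bin, int f2_bin, int f3_bin, int nr_split)
{
  // h_index = f1_bin + nr_split * f2_bin + nr_split^2 * f3_bin
  return (f1_bin + nr_split * (f2_bin + nr_split * f3_bin));
}

// Example: with nr_split = 5 the histogram has 5 * 5 * 5 = 125 bins,
// and bins (1, 2, 3) map to 1 + 5 * 2 + 25 * 3 = 86.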

template <typename PointInT, typename PointNT, typename PointOutT> void
pcl::PFHEstimation<PointInT, PointNT, PointOutT>::computeFeature (PointCloudOut &output)
{
  // Clear the feature map
  feature_map_.clear ();
  std::queue<std::pair<int, int> > empty;
  std::swap (key_list_, empty);

  pfh_histogram_.setZero (nr_subdiv_ * nr_subdiv_ * nr_subdiv_);

  // Allocate enough space to hold the results
  // \note This resize is irrelevant for a radiusSearch ().
  std::vector<int> nn_indices (k_);
  std::vector<float> nn_dists (k_);

  output.is_dense = true;
  // Save a few cycles by not checking every point for NaN/Inf values if the cloud is set to dense
  if (input_->is_dense)
  {
    // Iterating over the entire index vector
    for (size_t idx = 0; idx < indices_->size (); ++idx)
    {
      if (this->searchForNeighbors ((*indices_)[idx], search_parameter_, nn_indices, nn_dists) == 0)
      {
        for (int d = 0; d < pfh_histogram_.size (); ++d)
          output.points[idx].histogram[d] = std::numeric_limits<float>::quiet_NaN ();

        output.is_dense = false;
        continue;
      }

      // Estimate the PFH signature at each patch
      computePointPFHSignature (*surface_, *normals_, nn_indices, nr_subdiv_, pfh_histogram_);

      // Copy into the resultant cloud
      for (int d = 0; d < pfh_histogram_.size (); ++d)
        output.points[idx].histogram[d] = pfh_histogram_[d];
    }
  }
  else
  {
    // Iterating over the entire index vector
    for (size_t idx = 0; idx < indices_->size (); ++idx)
    {
      if (!isFinite ((*input_)[(*indices_)[idx]]) ||
          this->searchForNeighbors ((*indices_)[idx], search_parameter_, nn_indices, nn_dists) == 0)
      {
        for (int d = 0; d < pfh_histogram_.size (); ++d)
          output.points[idx].histogram[d] = std::numeric_limits<float>::quiet_NaN ();

        output.is_dense = false;
        continue;
      }

      // Estimate the PFH signature at each patch
      computePointPFHSignature (*surface_, *normals_, nn_indices, nr_subdiv_, pfh_histogram_);

      // Copy into the resultant cloud
      for (int d = 0; d < pfh_histogram_.size (); ++d)
        output.points[idx].histogram[d] = pfh_histogram_[d];
    }
  }
}

#define PCL_INSTANTIATE_PFHEstimation(T,NT,OutT) template class PCL_EXPORTS pcl::PFHEstimation<T,NT,OutT>;

#endif    // PCL_FEATURES_IMPL_PFH_H_
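
For context, a typical use of pcl::PFHEstimation follows the usual PCL feature-estimation pattern. The sketch below is illustrative only: the input file name is hypothetical and the search radii are data-dependent placeholders.

#include <pcl/point_types.h>
#include <pcl/io/pcd_io.h>
#include <pcl/features/normal_3d.h>
#include <pcl/features/pfh.h>
#include <pcl/search/kdtree.h>

int main ()
{
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::io::loadPCDFile ("input.pcd", *cloud);   // hypothetical input file

  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ> ());

  // Estimate normals first; PFH needs one normal per input point.
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> ne;
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  ne.setInputCloud (cloud);
  ne.setSearchMethod (tree);
  ne.setRadiusSearch (0.03);                    // placeholder radius
  ne.compute (*normals);

  // PFH estimation: 125-bin histograms (nr_subdiv_^3 with the default of 5).
  pcl::PFHEstimation<pcl::PointXYZ, pcl::Normal, pcl::PFHSignature125> pfh;
  pcl::PointCloud<pcl::PFHSignature125>::Ptr descriptors (new pcl::PointCloud<pcl::PFHSignature125>);
  pfh.setInputCloud (cloud);
  pfh.setInputNormals (normals);
  pfh.setSearchMethod (tree);
  pfh.setRadiusSearch (0.05);                   // should exceed the normal-estimation radius
  pfh.compute (*descriptors);
  return (0);
}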