SamplingSurfaceNormal.cpp
// kate: replace-tabs off; indent-width 4; indent-mode normal
// vim: ts=4:sw=4:noexpandtab
/*

Copyright (c) 2010--2018,
François Pomerleau and Stephane Magnenat, ASL, ETHZ, Switzerland
You can contact the authors at <f dot pomerleau at gmail dot com> and
<stephane at magnenat dot net>

All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.
    * Neither the name of the <organization> nor the
      names of its contributors may be used to endorse or promote products
      derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL ETH-ASL BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/
#include "SamplingSurfaceNormal.h"

#include "utils.h"

// Eigenvalues
#include "Eigen/QR"
#include "Eigen/Eigenvalues"

#include "PointMatcherPrivate.h"

#include <utility>
#include <algorithm>
#include <cstdlib> // std::rand and RAND_MAX, used by the random sampling in fuseRange()
// SamplingSurfaceNormalDataPointsFilter

// Constructor
template<typename T>
SamplingSurfaceNormalDataPointsFilter<T>::SamplingSurfaceNormalDataPointsFilter(
    const Parameters& params):
    PointMatcher<T>::DataPointsFilter("SamplingSurfaceNormalDataPointsFilter",
        SamplingSurfaceNormalDataPointsFilter::availableParameters(), params),
    ratio(Parametrizable::get<T>("ratio")),
    knn(Parametrizable::get<int>("knn")),
    samplingMethod(Parametrizable::get<int>("samplingMethod")),
    maxBoxDim(Parametrizable::get<T>("maxBoxDim")),
    averageExistingDescriptors(Parametrizable::get<bool>("averageExistingDescriptors")),
    keepNormals(Parametrizable::get<bool>("keepNormals")),
    keepDensities(Parametrizable::get<bool>("keepDensities")),
    keepEigenValues(Parametrizable::get<bool>("keepEigenValues")),
    keepEigenVectors(Parametrizable::get<bool>("keepEigenVectors"))
{
}
00067 
00068 // Compute
00069 template<typename T>
00070 typename PointMatcher<T>::DataPoints 
00071 SamplingSurfaceNormalDataPointsFilter<T>::filter(
00072         const DataPoints& input)
00073 {
00074         DataPoints output(input);
00075         inPlaceFilter(output);
00076         return output;
00077 }
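// Usage sketch (comment only, not part of the library source): a minimal,
// hedged example of creating this filter through the PointMatcher registrar
// and applying it to a cloud. The parameter names match availableParameters();
// "cloud.csv" is a placeholder file name.
//
//   #include <memory>
//   #include "pointmatcher/PointMatcher.h"
//
//   typedef PointMatcher<float> PM;
//
//   int main()
//   {
//       PM::DataPoints cloud(PM::DataPoints::load("cloud.csv"));
//       std::shared_ptr<PM::DataPointsFilter> filter =
//           PM::get().DataPointsFilterRegistrar.create(
//               "SamplingSurfaceNormalDataPointsFilter",
//               {{"ratio", "0.5"}, {"knn", "7"}, {"keepNormals", "1"}});
//       filter->inPlaceFilter(cloud); // subsamples in place, adds a "normals" descriptor
//       return 0;
//   }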
00078 
00079 // In-place filter
00080 template<typename T>
00081 void SamplingSurfaceNormalDataPointsFilter<T>::inPlaceFilter(
00082         DataPoints& cloud)
00083 {
00084         typedef typename DataPoints::View View;
00085         typedef typename DataPoints::Label Label;
00086         typedef typename DataPoints::Labels Labels;
00087 
00088         const int pointsCount(cloud.features.cols());
00089         const int featDim(cloud.features.rows());
00090         const int descDim(cloud.descriptors.rows());
00091         const unsigned int labelDim(cloud.descriptorLabels.size());
00092 
00093         int insertDim(0);
00094         if (averageExistingDescriptors)
00095         {
00096                 // TODO: this should be in the form of an assert
00097                 // Validate descriptors and labels
00098                 for(unsigned int i = 0; i < labelDim ; ++i)
00099                         insertDim += cloud.descriptorLabels[i].span;
00100                 if (insertDim != descDim)
00101                         throw InvalidField("SamplingSurfaceNormalDataPointsFilter: Error, descriptor labels do not match descriptor data");
00102         }
00103 
00104         // Compute space requirement for new descriptors
00105         const int dimNormals(featDim-1);
00106         const int dimDensities(1);
00107         const int dimEigValues(featDim-1);
00108         const int dimEigVectors((featDim-1)*(featDim-1));
00109 
00110         // Allocate space for new descriptors
00111         Labels cloudLabels;
00112         if (keepNormals)
00113                 cloudLabels.push_back(Label("normals", dimNormals));
00114         if (keepDensities)
00115                 cloudLabels.push_back(Label("densities", dimDensities));
00116         if (keepEigenValues)
00117                 cloudLabels.push_back(Label("eigValues", dimEigValues));
00118         if (keepEigenVectors)
00119                 cloudLabels.push_back(Label("eigVectors", dimEigVectors));
00120         cloud.allocateDescriptors(cloudLabels);
00121 
00122         // we keep build data on stack for reentrant behaviour
00123         View cloudExistingDescriptors(cloud.descriptors.block(0,0,cloud.descriptors.rows(),cloud.descriptors.cols()));
00124         BuildData buildData(cloud.features, cloud.descriptors);
00125 
00126         // get views
00127         if (keepNormals)
00128                 buildData.normals = cloud.getDescriptorViewByName("normals");
00129         if (keepDensities)
00130                 buildData.densities = cloud.getDescriptorViewByName("densities");
00131         if (keepEigenValues)
00132                 buildData.eigenValues = cloud.getDescriptorViewByName("eigValues");
00133         if (keepEigenVectors)
00134                 buildData.eigenVectors = cloud.getDescriptorViewByName("eigVectors");
00135         // build the new point cloud
00136         buildNew(
00137                 buildData,
00138                 0,
00139                 pointsCount,
00140                 cloud.features.rowwise().minCoeff(),
00141                 cloud.features.rowwise().maxCoeff()
00142         );
00143 
00144         // Bring the data we keep to the front of the arrays then
00145         // wipe the leftover unused space.
00146         std::sort(buildData.indicesToKeep.begin(), buildData.indicesToKeep.end());
00147         const int ptsOut = buildData.indicesToKeep.size();
00148         for (int i = 0; i < ptsOut; ++i)
00149         {
00150                 const int k = buildData.indicesToKeep[i];
00151                 assert(i <= k);
00152                 cloud.features.col(i) = cloud.features.col(k);
00153                 if (cloud.descriptors.rows() != 0)
00154                         cloud.descriptors.col(i) = cloud.descriptors.col(k);
00155                 if(keepNormals)
00156                         buildData.normals->col(i) = buildData.normals->col(k);
00157                 if(keepDensities)
00158                         (*buildData.densities)(0,i) = (*buildData.densities)(0,k);
00159                 if(keepEigenValues)
00160                         buildData.eigenValues->col(i) = buildData.eigenValues->col(k);
00161                 if(keepEigenVectors)
00162                         buildData.eigenVectors->col(i) = buildData.eigenVectors->col(k);
00163         }
00164         cloud.features.conservativeResize(Eigen::NoChange, ptsOut);
00165         cloud.descriptors.conservativeResize(Eigen::NoChange, ptsOut);
00166 
00167         // warning if some points were dropped
00168         if(buildData.unfitPointsCount != 0)
00169                 LOG_INFO_STREAM("  SamplingSurfaceNormalDataPointsFilter - Could not compute normal for " << buildData.unfitPointsCount << " pts.");
00170 }
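// Worked example of the descriptor layout (comment only): in 3D, features are
// homogeneous, so featDim = 4 and the blocks allocated above span
// dimNormals = 3, dimDensities = 1, dimEigValues = 3 and dimEigVectors = 9.
// With all keep* flags enabled, each surviving point therefore carries
// 3 + 1 + 3 + 9 = 16 new descriptor rows, appended after any existing ones.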
00171 
00172 template<typename T>
00173 void SamplingSurfaceNormalDataPointsFilter<T>::buildNew(
00174         BuildData& data, const int first, const int last, 
00175         Vector&& minValues, Vector&& maxValues) const
00176 {
00177         using namespace PointMatcherSupport;
00178         
00179         const int count(last - first);
00180         if (count <= int(knn))
00181         {
00182                 // compute for this range
00183                 fuseRange(data, first, last);
00184                 // TODO: make another filter that creates constant-density clouds,
00185                 // typically by stopping recursion after the median of the bounding cuboid
00186                 // is below a threshold, or that the number of points falls under a threshold
00187                 return;
00188         }
00189 
00190         // find the largest dimension of the box
00191         const int cutDim = argMax<T>(maxValues - minValues);
00192 
00193         // compute number of elements
00194         const int rightCount(count/2);
00195         const int leftCount(count - rightCount);
00196         assert(last - rightCount == first + leftCount);
00197 
00198         // sort, hack std::nth_element
00199         std::nth_element(
00200                 data.indices.begin() + first,
00201                 data.indices.begin() + first + leftCount,
00202                 data.indices.begin() + last,
00203                 CompareDim(cutDim, data)
00204         );
00205 
00206         // get value
00207         const int cutIndex(data.indices[first+leftCount]);
00208         const T cutVal(data.features(cutDim, cutIndex));
00209 
00210         // update bounds for left
00211         Vector leftMaxValues(maxValues);
00212         leftMaxValues[cutDim] = cutVal;
00213         // update bounds for right
00214         Vector rightMinValues(minValues);
00215         rightMinValues[cutDim] = cutVal;
00216 
00217         // recurse
00218         buildNew(data, first, first + leftCount, 
00219                 std::forward<Vector>(minValues), std::move(leftMaxValues));
00220         buildNew(data, first + leftCount, last, 
00221                 std::move(rightMinValues), std::forward<Vector>(maxValues));
00222 }
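// Illustration (comment only, not library code): the split above relies on the
// postcondition of std::nth_element, which places the element a full sort
// would put at the given position there, with nothing greater before it and
// nothing smaller after it. A toy sketch:
//
//   #include <algorithm>
//   #include <vector>
//
//   std::vector<float> v = {5, 1, 4, 2, 3};
//   std::nth_element(v.begin(), v.begin() + 2, v.end());
//   // Now v[2] == 3 (the median); {1, 2} sit before it and {4, 5} after it,
//   // each side in unspecified order. This runs in O(n) on average, which is
//   // why buildNew uses it instead of a full O(n log n) sort at every level.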
00223 
00224 template<typename T>
00225 void SamplingSurfaceNormalDataPointsFilter<T>::fuseRange(
00226         BuildData& data, const int first, const int last) const
00227 {
00228         using namespace PointMatcherSupport;
00229         
00230         const int colCount(last-first);
00231         const int featDim(data.features.rows());
00232 
00233         // build nearest neighbors list
00234         Matrix d(featDim-1, colCount);
00235         for (int i = 0; i < colCount; ++i)
00236                 d.col(i) = data.features.block(0,data.indices[first+i],featDim-1, 1);
00237         const Vector box = d.rowwise().maxCoeff() - d.rowwise().minCoeff();
00238         const T boxDim(box.maxCoeff());
00239         // drop box if it is too large
00240         if (boxDim > maxBoxDim)
00241         {
00242                 data.unfitPointsCount += colCount;
00243                 return;
00244         }
00245         const Vector mean = d.rowwise().sum() / T(colCount);
00246         const Matrix NN = (d.colwise() - mean);
00247 
00248         // compute covariance
00249         const Matrix C(NN * NN.transpose());
00250         Vector eigenVa = Vector::Identity(featDim-1, 1);
00251         Matrix eigenVe = Matrix::Identity(featDim-1, featDim-1);
00252         // Ensure that the matrix is suited for eigenvalues calculation
00253         if(keepNormals || keepEigenValues || keepEigenVectors)
00254         {
00255                 if(C.fullPivHouseholderQr().rank()+1 >= featDim-1)
00256                 {
00257                         const Eigen::EigenSolver<Matrix> solver(C);
00258                         eigenVa = solver.eigenvalues().real();
00259                         eigenVe = solver.eigenvectors().real();
00260                 }
00261                 else
00262                 {
00263                         data.unfitPointsCount += colCount;
00264                         return;
00265                 }
00266         }
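    // Background (comment only): C = NN * NN^T above is the unnormalised
    // sample covariance sum_i (p_i - mean)(p_i - mean)^T of the points in this
    // bucket. The eigenvector associated with the smallest eigenvalue spans
    // the direction of least spread, which is the usual PCA estimate of the
    // surface normal; computeNormal() below (declared in utils.h) is expected
    // to pick it out, and serializeEigVec() to flatten the eigenvector matrix
    // into the (featDim-1)^2 rows allocated for the "eigVectors" descriptor.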

    Vector normal;
    if (keepNormals)
        normal = computeNormal<T>(eigenVa, eigenVe);

    T density = 0;
    if (keepDensities)
        density = computeDensity<T>(NN);

    // if (keepEigenValues): nothing to do, eigenVa is used directly below

    Vector serialEigVector;
    if (keepEigenVectors)
        serialEigVector = serializeEigVec<T>(eigenVe);

    // Some safety checks
    if (data.descriptors.rows() != 0)
        assert(data.descriptors.cols() != 0);

    // samplingMethod == 0: keep a random subset of the points in the bin
    if (samplingMethod == 0)
    {
        for (int i = 0; i < colCount; ++i)
        {
            const float r = static_cast<float>(std::rand()) / static_cast<float>(RAND_MAX);
            if (r < ratio)
            {
                // Keep points with their descriptors
                const int k = data.indices[first+i];
                // Mark the indices which will be part of the final data
                data.indicesToKeep.push_back(k);

                // Build new descriptors
                if (keepNormals)
                    data.normals->col(k) = normal;
                if (keepDensities)
                    (*data.densities)(0,k) = density;
                if (keepEigenValues)
                    data.eigenValues->col(k) = eigenVa;
                if (keepEigenVectors)
                    data.eigenVectors->col(k) = serialEigVector;
            }
        }
    }
    // otherwise: collapse the bin to a single point at its centroid
    else
    {
        const int k = data.indices[first];
        // Mark the indices which will be part of the final data
        data.indicesToKeep.push_back(k);
        data.features.col(k).topRows(featDim-1) = mean;
        data.features(featDim-1, k) = 1;

        if (data.descriptors.rows() != 0)
        {
            // Average the existing descriptors
            if (averageExistingDescriptors)
            {
                Vector mergedDesc(Vector::Zero(data.descriptors.rows()));
                for (int i = 0; i < colCount; ++i)
                    mergedDesc += data.descriptors.col(data.indices[first+i]);
                mergedDesc /= T(colCount);
                data.descriptors.col(k) = mergedDesc;
            }
            // else just keep the first one
        }

        // Build new descriptors
        if (keepNormals)
            data.normals->col(k) = normal;
        if (keepDensities)
            (*data.densities)(0,k) = density;
        if (keepEigenValues)
            data.eigenValues->col(k) = eigenVa;
        if (keepEigenVectors)
            data.eigenVectors->col(k) = serialEigVector;
    }
}
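// Note on randomness (comment only): the random branch above draws from
// std::rand() without seeding it, so repeated runs produce the same
// subsampling unless the caller seeds the generator first, e.g.:
//
//   #include <cstdlib>
//   #include <ctime>
//
//   std::srand(static_cast<unsigned>(std::time(nullptr))); // caller-side, hypothetical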

template struct SamplingSurfaceNormalDataPointsFilter<float>;
template struct SamplingSurfaceNormalDataPointsFilter<double>;

