SamplingSurfaceNormal.cpp
// kate: replace-tabs off; indent-width 4; indent-mode normal
// vim: ts=4:sw=4:noexpandtab
/*

Copyright (c) 2010--2018,
François Pomerleau and Stephane Magnenat, ASL, ETHZ, Switzerland
You can contact the authors at <f dot pomerleau at gmail dot com> and
<stephane at magnenat dot net>

All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.
    * Neither the name of the <organization> nor the
      names of its contributors may be used to endorse or promote products
      derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL ETH-ASL BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/
#include "SamplingSurfaceNormal.h"

// Eigenvalues
#include "Eigen/QR"
#include "Eigen/Eigenvalues"

#include "PointMatcherPrivate.h"

#include <utility>
#include <algorithm>
#include <cstdlib> // std::rand and RAND_MAX, used by the random sampling method below

// SamplingSurfaceNormalDataPointsFilter

// Constructor
template<typename T>
SamplingSurfaceNormalDataPointsFilter<T>::SamplingSurfaceNormalDataPointsFilter(
    const Parameters& params):
    PointMatcher<T>::DataPointsFilter("SamplingSurfaceNormalDataPointsFilter",
        SamplingSurfaceNormalDataPointsFilter::availableParameters(), params),
    ratio(Parametrizable::get<T>("ratio")),
    knn(Parametrizable::get<int>("knn")),
    samplingMethod(Parametrizable::get<int>("samplingMethod")),
    maxBoxDim(Parametrizable::get<T>("maxBoxDim")),
    averageExistingDescriptors(Parametrizable::get<bool>("averageExistingDescriptors")),
    keepNormals(Parametrizable::get<bool>("keepNormals")),
    keepDensities(Parametrizable::get<bool>("keepDensities")),
    keepEigenValues(Parametrizable::get<bool>("keepEigenValues")),
    keepEigenVectors(Parametrizable::get<bool>("keepEigenVectors"))
{
}

// Compute
template<typename T>
typename PointMatcher<T>::DataPoints
SamplingSurfaceNormalDataPointsFilter<T>::filter(
    const DataPoints& input)
{
    DataPoints output(input);
    inPlaceFilter(output);
    return output;
}

// In-place filter
template<typename T>
void SamplingSurfaceNormalDataPointsFilter<T>::inPlaceFilter(
    DataPoints& cloud)
{
    typedef typename DataPoints::View View;
    typedef typename DataPoints::Label Label;
    typedef typename DataPoints::Labels Labels;

    const int pointsCount(cloud.features.cols());
    const int featDim(cloud.features.rows());
    const int descDim(cloud.descriptors.rows());
    const unsigned int labelDim(cloud.descriptorLabels.size());

    int insertDim(0);
    if (averageExistingDescriptors)
    {
        // TODO: this should be in the form of an assert
        // Validate descriptors and labels
        for (unsigned int i = 0; i < labelDim; ++i)
            insertDim += cloud.descriptorLabels[i].span;
        if (insertDim != descDim)
            throw InvalidField("SamplingSurfaceNormalDataPointsFilter: Error, descriptor labels do not match descriptor data");
    }

    // Compute space requirement for new descriptors
    const int dimNormals(featDim-1);
    const int dimDensities(1);
    const int dimEigValues(featDim-1);
    const int dimEigVectors((featDim-1)*(featDim-1));

    // Allocate space for new descriptors
    Labels cloudLabels;
    if (keepNormals)
        cloudLabels.push_back(Label("normals", dimNormals));
    if (keepDensities)
        cloudLabels.push_back(Label("densities", dimDensities));
    if (keepEigenValues)
        cloudLabels.push_back(Label("eigValues", dimEigValues));
    if (keepEigenVectors)
        cloudLabels.push_back(Label("eigVectors", dimEigVectors));
    cloud.allocateDescriptors(cloudLabels);

    // we keep build data on stack for reentrant behaviour
    View cloudExistingDescriptors(cloud.descriptors.block(0,0,cloud.descriptors.rows(),cloud.descriptors.cols()));
    BuildData buildData(cloud.features, cloud.descriptors);

    // get views
    if (keepNormals)
        buildData.normals = cloud.getDescriptorViewByName("normals");
    if (keepDensities)
        buildData.densities = cloud.getDescriptorViewByName("densities");
    if (keepEigenValues)
        buildData.eigenValues = cloud.getDescriptorViewByName("eigValues");
    if (keepEigenVectors)
        buildData.eigenVectors = cloud.getDescriptorViewByName("eigVectors");
    // build the new point cloud
    buildNew(
        buildData,
        0,
        pointsCount,
        cloud.features.rowwise().minCoeff(),
        cloud.features.rowwise().maxCoeff()
    );

    // Bring the data we keep to the front of the arrays then
    // wipe the leftover unused space.
    std::sort(buildData.indicesToKeep.begin(), buildData.indicesToKeep.end());
    const int ptsOut = buildData.indicesToKeep.size();
    for (int i = 0; i < ptsOut; ++i)
    {
        const int k = buildData.indicesToKeep[i];
        assert(i <= k);
        cloud.features.col(i) = cloud.features.col(k);
        if (cloud.descriptors.rows() != 0)
            cloud.descriptors.col(i) = cloud.descriptors.col(k);
        if (cloud.times.rows() != 0)
            cloud.times.col(i) = cloud.times.col(k);
        if (keepNormals)
            buildData.normals->col(i) = buildData.normals->col(k);
        if (keepDensities)
            (*buildData.densities)(0,i) = (*buildData.densities)(0,k);
        if (keepEigenValues)
            buildData.eigenValues->col(i) = buildData.eigenValues->col(k);
        if (keepEigenVectors)
            buildData.eigenVectors->col(i) = buildData.eigenVectors->col(k);
    }
    cloud.conservativeResize(ptsOut);
    // warning if some points were dropped
    if (buildData.unfitPointsCount != 0)
        LOG_INFO_STREAM("  SamplingSurfaceNormalDataPointsFilter - Could not compute normal for " << buildData.unfitPointsCount << " pts.");
}

template<typename T>
void SamplingSurfaceNormalDataPointsFilter<T>::buildNew(
    BuildData& data, const int first, const int last,
    Vector&& minValues, Vector&& maxValues) const
{
    using namespace PointMatcherSupport;

    const int count(last - first);
    if (count <= int(knn))
    {
        // compute for this range
        fuseRange(data, first, last);
        // TODO: make another filter that creates constant-density clouds,
        // typically by stopping recursion after the median of the bounding cuboid
        // is below a threshold, or that the number of points falls under a threshold
        return;
    }

    // find the largest dimension of the box
    const int cutDim = argMax<T>(maxValues - minValues);

    // compute number of elements
    const int rightCount(count/2);
    const int leftCount(count - rightCount);
    assert(last - rightCount == first + leftCount);

    // sort, hack std::nth_element
    std::nth_element(
        data.indices.begin() + first,
        data.indices.begin() + first + leftCount,
        data.indices.begin() + last,
        CompareDim(cutDim, data)
    );

    // get value
    const int cutIndex(data.indices[first+leftCount]);
    const T cutVal(data.features(cutDim, cutIndex));

    // update bounds for left
    Vector leftMaxValues(maxValues);
    leftMaxValues[cutDim] = cutVal;
    // update bounds for right
    Vector rightMinValues(minValues);
    rightMinValues[cutDim] = cutVal;

    // recurse
    buildNew(data, first, first + leftCount,
        std::forward<Vector>(minValues), std::move(leftMaxValues));
    buildNew(data, first + leftCount, last,
        std::move(rightMinValues), std::forward<Vector>(maxValues));
}

template<typename T>
void SamplingSurfaceNormalDataPointsFilter<T>::fuseRange(
    BuildData& data, const int first, const int last) const
{
    using namespace PointMatcherSupport;

    const int colCount(last-first);
    const int featDim(data.features.rows());

    // build nearest neighbors list
    Matrix d(featDim-1, colCount);
    for (int i = 0; i < colCount; ++i)
        d.col(i) = data.features.block(0, data.indices[first+i], featDim-1, 1);
    const Vector box = d.rowwise().maxCoeff() - d.rowwise().minCoeff();
    const T boxDim(box.maxCoeff());
    // drop box if it is too large
    if (boxDim > maxBoxDim)
    {
        data.unfitPointsCount += colCount;
        return;
    }
    const Vector mean = d.rowwise().sum() / T(colCount);
    const Matrix NN = (d.colwise() - mean);

    // compute covariance
    const Matrix C((NN * NN.transpose()) / T(colCount));
    Vector eigenVa = Vector::Identity(featDim-1, 1);
    Matrix eigenVe = Matrix::Identity(featDim-1, featDim-1);
    // Ensure that the matrix is suited for eigenvalues calculation
    if (keepNormals || keepEigenValues || keepEigenVectors)
    {
        if (C.fullPivHouseholderQr().rank()+1 >= featDim-1)
        {
            const Eigen::EigenSolver<Matrix> solver(C);
            eigenVa = solver.eigenvalues().real();
            eigenVe = solver.eigenvectors().real();
        }
        else
        {
            data.unfitPointsCount += colCount;
            return;
        }
    }

    Vector normal;
    if (keepNormals)
        normal = computeNormal<T>(eigenVa, eigenVe);

    T density = 0;
    if (keepDensities)
        density = computeDensity<T>(NN);

    //if (keepEigenValues) nothing to do

    Vector serialEigVector;
    if (keepEigenVectors)
        serialEigVector = serializeEigVec<T>(eigenVe);

    // some safety check
    if (data.descriptors.rows() != 0)
        assert(data.descriptors.cols() != 0);

    // Filter points randomly
    if (samplingMethod == 0)
    {
        for (int i = 0; i < colCount; ++i)
        {
            const float r = (float)std::rand()/(float)RAND_MAX;
            if (r < ratio)
            {
                // Keep points with their descriptors
                const int k = data.indices[first+i];
                // Mark the indices which will be part of the final data
                data.indicesToKeep.push_back(k);

                // Build new descriptors
                if (keepNormals)
                    data.normals->col(k) = normal;
                if (keepDensities)
                    (*data.densities)(0,k) = density;
                if (keepEigenValues)
                    data.eigenValues->col(k) = eigenVa;
                if (keepEigenVectors)
                    data.eigenVectors->col(k) = serialEigVector;
            }
        }
    }
    else
    {
        // Keep a single point for this bin and move it to the bin's centre of mass
        const int k = data.indices[first];
        // Mark the indices which will be part of the final data
        data.indicesToKeep.push_back(k);
        data.features.col(k).topRows(featDim-1) = mean;
        data.features(featDim-1, k) = 1;

        if (data.descriptors.rows() != 0)
        {
            // average the existing descriptors
            if (averageExistingDescriptors)
            {
                Vector mergedDesc(Vector::Zero(data.descriptors.rows()));
                for (int i = 0; i < colCount; ++i)
                    mergedDesc += data.descriptors.col(data.indices[first+i]);
                mergedDesc /= T(colCount);
                data.descriptors.col(k) = mergedDesc;
            }
            // else just keep the first one
        }

        // Build new descriptors
        if (keepNormals)
            data.normals->col(k) = normal;
        if (keepDensities)
            (*data.densities)(0,k) = density;
        if (keepEigenValues)
            data.eigenValues->col(k) = eigenVa;
        if (keepEigenVectors)
            data.eigenVectors->col(k) = serialEigVector;
    }
}

template struct SamplingSurfaceNormalDataPointsFilter<float>;
template struct SamplingSurfaceNormalDataPointsFilter<double>;
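
Typical usage: the filter recursively splits the cloud's bounding box until each bin holds at most knn points, then fuses each bin, estimating a normal from the eigen-decomposition of the bin's covariance and keeping either a random subset of the bin (samplingMethod == 0) or its centre of mass. It is normally obtained through libpointmatcher's registrar rather than constructed directly. The snippet below is a minimal sketch, assuming a libpointmatcher version whose DataPointsFilterRegistrar::create returns a std::shared_ptr and a point-cloud file passed on the command line; the parameter values are illustrative only.

#include <iostream>
#include <memory>
#include "pointmatcher/PointMatcher.h"

typedef PointMatcher<float> PM;

int main(int argc, char* argv[])
{
    if (argc < 2)
    {
        std::cerr << "usage: " << argv[0] << " cloud.csv" << std::endl;
        return 1;
    }

    // Load a 2-D or 3-D point cloud (format chosen from the file extension).
    const PM::DataPoints cloud(PM::DataPoints::load(argv[1]));

    // Create the filter through the registrar; parameters are strings and
    // correspond to the Parametrizable::get calls in the constructor above.
    std::shared_ptr<PM::DataPointsFilter> subsample =
        PM::get().DataPointsFilterRegistrar.create(
            "SamplingSurfaceNormalDataPointsFilter",
            {
                {"knn", "10"},         // maximum points per bin before fusing
                {"ratio", "0.2"},      // kept fraction when samplingMethod is 0
                {"keepNormals", "1"},  // attach a "normals" descriptor to kept points
                {"keepDensities", "0"}
            });

    // Apply the filter; the result carries the requested descriptors.
    const PM::DataPoints filtered = subsample->filter(cloud);
    std::cout << "kept " << filtered.getNbPoints() << " of "
              << cloud.getNbPoints() << " points" << std::endl;

    return 0;
}

Because the result thins the cloud while attaching per-point normals, this filter is commonly used on the reference cloud of an ICP chain, where the normals feed point-to-plane error minimization.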