util3d_surface.cpp
1 /*
2 Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
3 All rights reserved.
4 
5 Redistribution and use in source and binary forms, with or without
6 modification, are permitted provided that the following conditions are met:
7  * Redistributions of source code must retain the above copyright
8  notice, this list of conditions and the following disclaimer.
9  * Redistributions in binary form must reproduce the above copyright
10  notice, this list of conditions and the following disclaimer in the
11  documentation and/or other materials provided with the distribution.
12  * Neither the name of the Universite de Sherbrooke nor the
13  names of its contributors may be used to endorse or promote products
14  derived from this software without specific prior written permission.
15 
16 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17 ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
20 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
21 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
22 LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
23 ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */
27 
31 #include "rtabmap/core/util3d.h"
32 #include "rtabmap/core/util2d.h"
33 #include "rtabmap/core/Memory.h"
34 #include "rtabmap/core/DBDriver.h"
38 #include "rtabmap/utilite/UFile.h"
40 #include "rtabmap/utilite/UMath.h"
41 #include "rtabmap/utilite/UTimer.h"
42 #include <opencv2/core/core_c.h>
43 #include <opencv2/imgproc/types_c.h>
44 #include <pcl/search/kdtree.h>
45 #include <pcl/surface/gp3.h>
46 #include <pcl/features/normal_3d_omp.h>
47 #include <pcl/surface/mls.h>
49 #include <pcl/features/integral_image_normal.h>
50 
51 #ifdef RTABMAP_ALICE_VISION
52 #include <aliceVision/sfmData/SfMData.hpp>
53 #include <aliceVision/sfmDataIO/sfmDataIO.hpp>
54 #include <aliceVision/mesh/Mesh.hpp>
55 #include <aliceVision/mesh/Texturing.hpp>
56 #include <aliceVision/camera/Pinhole.hpp>
57 #include <boost/algorithm/string.hpp>
58 using namespace aliceVision;
59 #endif
60 
61 #ifndef DISABLE_VTK
62 #include <pcl/surface/vtk_smoothing/vtk_mesh_quadric_decimation.h>
63 #endif
64 
65 #if PCL_VERSION_COMPARE(>, 1, 11, 1)
66 #include <pcl/types.h>
67 #endif
68 #if PCL_VERSION_COMPARE(<, 1, 8, 0)
70 #else
71 #include <pcl/surface/organized_fast_mesh.h>
72 #include <pcl/surface/impl/marching_cubes.hpp>
73 #include <pcl/surface/impl/organized_fast_mesh.hpp>
74 #include <pcl/impl/instantiate.hpp>
75 #include <pcl/point_types.h>
76 
77 // Instantiations of specific point types
78 PCL_INSTANTIATE(OrganizedFastMesh, (pcl::PointXYZRGBNormal))
79 
80 #include <pcl/features/impl/normal_3d_omp.hpp>
81 #if PCL_VERSION_COMPARE(<=, 1, 8, 0)
82 #ifdef PCL_ONLY_CORE_POINT_TYPES
83 PCL_INSTANTIATE_PRODUCT(NormalEstimationOMP, ((pcl::PointXYZRGB))((pcl::Normal)))
84 #endif
85 #endif
86 #endif
87 
88 namespace rtabmap
89 {
90 
91 namespace util3d
92 {
93 
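// Builds adjacency indexes for a polygon mesh: for each polygon, the set of neighbor polygons
// sharing an edge (at least two common vertices), and for each vertex, the set of polygons using it.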
94 void createPolygonIndexes(
95  const std::vector<pcl::Vertices> & polygons,
96  int cloudSize,
97  std::vector<std::set<int> > & neighbors,
98  std::vector<std::set<int> > & vertexToPolygons)
99 {
100  vertexToPolygons = std::vector<std::set<int> >(cloudSize);
101  neighbors = std::vector<std::set<int> >(polygons.size());
102 
103  for(unsigned int i=0; i<polygons.size(); ++i)
104  {
105  std::set<int> vertices(polygons[i].vertices.begin(), polygons[i].vertices.end());
106 
107  for(unsigned int j=0; j<polygons[i].vertices.size(); ++j)
108  {
109  int v = polygons[i].vertices.at(j);
110  for(std::set<int>::iterator iter=vertexToPolygons[v].begin(); iter!=vertexToPolygons[v].end(); ++iter)
111  {
112  int numSharedVertices = 0;
113  for(unsigned int k=0; k<polygons.at(*iter).vertices.size() && numSharedVertices<2; ++k)
114  {
115  if(vertices.find(polygons.at(*iter).vertices.at(k)) != vertices.end())
116  {
117  ++numSharedVertices;
118  }
119  }
120  if(numSharedVertices >= 2)
121  {
122  neighbors[*iter].insert(i);
123  neighbors[i].insert(*iter);
124  }
125  }
126  vertexToPolygons[v].insert(i);
127  }
128  }
129 }
130 
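// Groups polygons into connected clusters using the neighbor sets built by createPolygonIndexes();
// clusters containing minClusterSize polygons or fewer are discarded.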
131 std::list<std::list<int> > clusterPolygons(
132  const std::vector<std::set<int> > & neighborPolygons,
133  int minClusterSize)
134 {
135  std::set<int> polygonsChecked;
136 
137  std::list<std::list<int> > clusters;
138 
139  for(unsigned int i=0; i<neighborPolygons.size(); ++i)
140  {
141  if(polygonsChecked.find(i) == polygonsChecked.end())
142  {
143  std::list<int> currentCluster;
144  currentCluster.push_back(i);
145  polygonsChecked.insert(i);
146 
147  for(std::list<int>::iterator iter=currentCluster.begin(); iter!=currentCluster.end(); ++iter)
148  {
149  // get neighbor polygons
150  std::set<int> neighbors = neighborPolygons[*iter];
151  for(std::set<int>::iterator jter=neighbors.begin(); jter!=neighbors.end(); ++jter)
152  {
153  if(polygonsChecked.insert(*jter).second)
154  {
155  currentCluster.push_back(*jter);
156  }
157  }
158  }
159  if((int)currentCluster.size() > minClusterSize)
160  {
161  clusters.push_back(currentCluster);
162  }
163  }
164  }
165  return clusters;
166 }
167 
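// Meshes an organized (image-structured) cloud with pcl::OrganizedFastMesh. The three overloads below
// differ only by point type; when quad output is requested, the vertex order of each quad is flipped
// to keep a right-handed winding.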
168 std::vector<pcl::Vertices> organizedFastMesh(
169  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
170  double angleTolerance,
171  bool quad,
172  int trianglePixelSize,
173  const Eigen::Vector3f & viewpoint)
174 {
175  UDEBUG("size=%d angle=%f quad=%d triangleSize=%d", (int)cloud->size(), angleTolerance, quad?1:0, trianglePixelSize);
176  UASSERT(cloud->is_dense == false);
177  UASSERT(cloud->width > 1 && cloud->height > 1);
178 
179  pcl::OrganizedFastMesh<pcl::PointXYZ> ofm;
180  ofm.setTrianglePixelSize (trianglePixelSize);
181  ofm.setTriangulationType (quad?pcl::OrganizedFastMesh<pcl::PointXYZ>::QUAD_MESH:pcl::OrganizedFastMesh<pcl::PointXYZ>::TRIANGLE_ADAPTIVE_CUT);
182  ofm.setInputCloud (cloud);
183  ofm.setAngleTolerance(angleTolerance);
184  ofm.setViewpoint(viewpoint);
185 
186  std::vector<pcl::Vertices> vertices;
187  ofm.reconstruct (vertices);
188 
189  if(quad)
190  {
191  //flip all polygons (right handed)
192  std::vector<pcl::Vertices> output(vertices.size());
193  for(unsigned int i=0; i<vertices.size(); ++i)
194  {
195  output[i].vertices.resize(4);
196  output[i].vertices[0] = vertices[i].vertices[0];
197  output[i].vertices[3] = vertices[i].vertices[1];
198  output[i].vertices[2] = vertices[i].vertices[2];
199  output[i].vertices[1] = vertices[i].vertices[3];
200  }
201  return output;
202  }
203 
204  return vertices;
205 }
206 std::vector<pcl::Vertices> organizedFastMesh(
207  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
208  double angleTolerance,
209  bool quad,
210  int trianglePixelSize,
211  const Eigen::Vector3f & viewpoint)
212 {
213  UDEBUG("size=%d angle=%f quad=%d triangleSize=%d", (int)cloud->size(), angleTolerance, quad?1:0, trianglePixelSize);
214  UASSERT(cloud->is_dense == false);
215  UASSERT(cloud->width > 1 && cloud->height > 1);
216 
217  pcl::OrganizedFastMesh<pcl::PointXYZRGB> ofm;
218  ofm.setTrianglePixelSize (trianglePixelSize);
219  ofm.setTriangulationType (quad?pcl::OrganizedFastMesh<pcl::PointXYZRGB>::QUAD_MESH:pcl::OrganizedFastMesh<pcl::PointXYZRGB>::TRIANGLE_ADAPTIVE_CUT);
220  ofm.setInputCloud (cloud);
221  ofm.setAngleTolerance(angleTolerance);
222  ofm.setViewpoint(viewpoint);
223 
224  std::vector<pcl::Vertices> vertices;
225  ofm.reconstruct (vertices);
226 
227  if(quad)
228  {
229  //flip all polygons (right handed)
230  std::vector<pcl::Vertices> output(vertices.size());
231  for(unsigned int i=0; i<vertices.size(); ++i)
232  {
233  output[i].vertices.resize(4);
234  output[i].vertices[0] = vertices[i].vertices[0];
235  output[i].vertices[3] = vertices[i].vertices[1];
236  output[i].vertices[2] = vertices[i].vertices[2];
237  output[i].vertices[1] = vertices[i].vertices[3];
238  }
239  return output;
240  }
241 
242  return vertices;
243 }
244 std::vector<pcl::Vertices> organizedFastMesh(
245  const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloud,
246  double angleTolerance,
247  bool quad,
248  int trianglePixelSize,
249  const Eigen::Vector3f & viewpoint)
250 {
251  UDEBUG("size=%d angle=%f quad=%d triangleSize=%d", (int)cloud->size(), angleTolerance, quad?1:0, trianglePixelSize);
252  UASSERT(cloud->is_dense == false);
253  UASSERT(cloud->width > 1 && cloud->height > 1);
254 
255  pcl::OrganizedFastMesh<pcl::PointXYZRGBNormal> ofm;
256  ofm.setTrianglePixelSize (trianglePixelSize);
257  ofm.setTriangulationType (quad?pcl::OrganizedFastMesh<pcl::PointXYZRGBNormal>::QUAD_MESH:pcl::OrganizedFastMesh<pcl::PointXYZRGBNormal>::TRIANGLE_ADAPTIVE_CUT);
258  ofm.setInputCloud (cloud);
259  ofm.setAngleTolerance(angleTolerance);
260  ofm.setViewpoint(viewpoint);
261 
262  std::vector<pcl::Vertices> vertices;
263  ofm.reconstruct (vertices);
264 
265  if(quad)
266  {
267  //flip all polygons (right handed)
268  std::vector<pcl::Vertices> output(vertices.size());
269  for(unsigned int i=0; i<vertices.size(); ++i)
270  {
271  output[i].vertices.resize(4);
272  output[i].vertices[0] = vertices[i].vertices[0];
273  output[i].vertices[3] = vertices[i].vertices[1];
274  output[i].vertices[2] = vertices[i].vertices[2];
275  output[i].vertices[1] = vertices[i].vertices[3];
276  }
277  return output;
278  }
279 
280  return vertices;
281 }
282 
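// Appends cloudB/polygonsB to cloudA/polygonsA, offsetting the appended polygon indices by the
// original size of cloudA. Both clouds must be unorganized.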
283 void appendMesh(
284  pcl::PointCloud<pcl::PointXYZRGBNormal> & cloudA,
285  std::vector<pcl::Vertices> & polygonsA,
286  const pcl::PointCloud<pcl::PointXYZRGBNormal> & cloudB,
287  const std::vector<pcl::Vertices> & polygonsB)
288 {
289  UDEBUG("cloudA=%d polygonsA=%d cloudB=%d polygonsB=%d", (int)cloudA.size(), (int)polygonsA.size(), (int)cloudB.size(), (int)polygonsB.size());
290  UASSERT(!cloudA.isOrganized() && !cloudB.isOrganized());
291 
292  int sizeA = (int)cloudA.size();
293  cloudA += cloudB;
294 
295  int sizePolygonsA = (int)polygonsA.size();
296  polygonsA.resize(sizePolygonsA+polygonsB.size());
297 
298  for(unsigned int i=0; i<polygonsB.size(); ++i)
299  {
300  pcl::Vertices vertices = polygonsB[i];
301  for(unsigned int j=0; j<vertices.vertices.size(); ++j)
302  {
303  vertices.vertices[j] += sizeA;
304  }
305  polygonsA[i+sizePolygonsA] = vertices;
306  }
307 }
308 
309 void appendMesh(
310  pcl::PointCloud<pcl::PointXYZRGB> & cloudA,
311  std::vector<pcl::Vertices> & polygonsA,
312  const pcl::PointCloud<pcl::PointXYZRGB> & cloudB,
313  const std::vector<pcl::Vertices> & polygonsB)
314 {
315  UDEBUG("cloudA=%d polygonsA=%d cloudB=%d polygonsB=%d", (int)cloudA.size(), (int)polygonsA.size(), (int)cloudB.size(), (int)polygonsB.size());
316  UASSERT(!cloudA.isOrganized() && !cloudB.isOrganized());
317 
318  int sizeA = (int)cloudA.size();
319  cloudA += cloudB;
320 
321  int sizePolygonsA = (int)polygonsA.size();
322  polygonsA.resize(sizePolygonsA+polygonsB.size());
323 
324  for(unsigned int i=0; i<polygonsB.size(); ++i)
325  {
326  pcl::Vertices vertices = polygonsB[i];
327  for(unsigned int j=0; j<vertices.vertices.size(); ++j)
328  {
329  vertices.vertices[j] += sizeA;
330  }
331  polygonsA[i+sizePolygonsA] = vertices;
332  }
333 }
334 
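// Compacts a mesh by keeping only the vertices referenced by at least one polygon; returns, for each
// kept vertex, its index in the input cloud.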
335 std::vector<int> filterNotUsedVerticesFromMesh(
336  const pcl::PointCloud<pcl::PointXYZRGBNormal> & cloud,
337  const std::vector<pcl::Vertices> & polygons,
338  pcl::PointCloud<pcl::PointXYZRGBNormal> & outputCloud,
339  std::vector<pcl::Vertices> & outputPolygons)
340 {
341  UDEBUG("size=%d polygons=%d", (int)cloud.size(), (int)polygons.size());
342  std::map<int, int> addedVertices; //<oldIndex, newIndex>
343  std::vector<int> output; //<oldIndex>
344  output.resize(cloud.size());
345  outputCloud.resize(cloud.size());
346  outputCloud.is_dense = true;
347  outputPolygons.resize(polygons.size());
348  int oi = 0;
349  for(unsigned int i=0; i<polygons.size(); ++i)
350  {
351  pcl::Vertices & v = outputPolygons[i];
352  v.vertices.resize(polygons[i].vertices.size());
353  for(unsigned int j=0; j<polygons[i].vertices.size(); ++j)
354  {
355  std::map<int, int>::iterator iter = addedVertices.find(polygons[i].vertices[j]);
356  if(iter == addedVertices.end())
357  {
358  outputCloud[oi] = cloud.at(polygons[i].vertices[j]);
359  addedVertices.insert(std::make_pair(polygons[i].vertices[j], oi));
360  output[oi] = polygons[i].vertices[j];
361  v.vertices[j] = oi++;
362  }
363  else
364  {
365  v.vertices[j] = iter->second;
366  }
367  }
368  }
369  outputCloud.resize(oi);
370  output.resize(oi);
371 
372  return output;
373 }
374 
375 std::vector<int> filterNotUsedVerticesFromMesh(
376  const pcl::PointCloud<pcl::PointXYZRGB> & cloud,
377  const std::vector<pcl::Vertices> & polygons,
378  pcl::PointCloud<pcl::PointXYZRGB> & outputCloud,
379  std::vector<pcl::Vertices> & outputPolygons)
380 {
381  UDEBUG("size=%d polygons=%d", (int)cloud.size(), (int)polygons.size());
382  std::map<int, int> addedVertices; //<oldIndex, newIndex>
383  std::vector<int> output; //<oldIndex>
384  output.resize(cloud.size());
385  outputCloud.resize(cloud.size());
386  outputCloud.is_dense = true;
387  outputPolygons.resize(polygons.size());
388  int oi = 0;
389  for(unsigned int i=0; i<polygons.size(); ++i)
390  {
391  pcl::Vertices & v = outputPolygons[i];
392  v.vertices.resize(polygons[i].vertices.size());
393  for(unsigned int j=0; j<polygons[i].vertices.size(); ++j)
394  {
395  std::map<int, int>::iterator iter = addedVertices.find(polygons[i].vertices[j]);
396  if(iter == addedVertices.end())
397  {
398  outputCloud[oi] = cloud.at(polygons[i].vertices[j]);
399  addedVertices.insert(std::make_pair(polygons[i].vertices[j], oi));
400  output[oi] = polygons[i].vertices[j];
401  v.vertices[j] = oi++;
402  }
403  else
404  {
405  v.vertices[j] = iter->second;
406  }
407  }
408  }
409  outputCloud.resize(oi);
410  output.resize(oi);
411 
412  return output;
413 }
414 
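// Removes NaN points from an organized mesh cloud and remaps the polygon indices to the resulting
// dense cloud; returns the original index of each kept point.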
415 std::vector<int> filterNaNPointsFromMesh(
416  const pcl::PointCloud<pcl::PointXYZRGB> & cloud,
417  const std::vector<pcl::Vertices> & polygons,
418  pcl::PointCloud<pcl::PointXYZRGB> & outputCloud,
419  std::vector<pcl::Vertices> & outputPolygons)
420 {
421  UDEBUG("size=%d polygons=%d", (int)cloud.size(), (int)polygons.size());
422  std::map<int, int> addedVertices; //<oldIndex, newIndex>
423  std::vector<int> output; //<oldIndex>
424  output.resize(cloud.size());
425  outputCloud.resize(cloud.size());
426  outputCloud.is_dense = true;
427  std::vector<int> organizedToDense(cloud.size(), -1);
428 
429  int oi = 0;
430  for(unsigned int i=0; i<cloud.size(); ++i)
431  {
432  if(pcl::isFinite(cloud.at(i)))
433  {
434  outputCloud.at(oi) = cloud.at(i);
435  output[oi] = i;
436  organizedToDense[i] = oi;
437  ++oi;
438  }
439  }
440  outputCloud.resize(oi);
441  output.resize(oi);
442 
443  // remap polygons to dense cloud
444  outputPolygons = polygons;
445  for(unsigned int i=0; i<outputPolygons.size(); ++i)
446  {
447  pcl::Vertices & v = outputPolygons[i];
448  for(unsigned int j=0; j<v.vertices.size(); ++j)
449  {
450  UASSERT(organizedToDense[v.vertices[j]] >= 0);
451  v.vertices[j] = organizedToDense[v.vertices[j]];
452  }
453  }
454 
455  return output;
456 }
457 
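// Merges vertices closer than the given radius by redirecting polygon indices to a single reference
// vertex (the lowest or the highest index found in the radius, depending on keepLatestInRadius).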
458 std::vector<pcl::Vertices> filterCloseVerticesFromMesh(
459  const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud,
460  const std::vector<pcl::Vertices> & polygons,
461  float radius,
462  float angle, // FIXME angle not used
463  bool keepLatestInRadius)
464 {
465  UDEBUG("size=%d polygons=%d radius=%f angle=%f keepLatest=%d",
466  (int)cloud->size(), (int)polygons.size(), radius, angle, keepLatestInRadius?1:0);
467  std::vector<pcl::Vertices> outputPolygons;
468  pcl::KdTreeFLANN<pcl::PointXYZRGBNormal>::Ptr kdtree(new pcl::KdTreeFLANN<pcl::PointXYZRGBNormal>);
469  kdtree->setInputCloud(cloud);
470 
471  std::map<int, int> verticesDone;
472  outputPolygons = polygons;
473  for(unsigned int i=0; i<outputPolygons.size(); ++i)
474  {
475  pcl::Vertices & polygon = outputPolygons[i];
476  for(unsigned int j=0; j<polygon.vertices.size(); ++j)
477  {
478  std::map<int, int>::iterator iter = verticesDone.find(polygon.vertices[j]);
479  if(iter != verticesDone.end())
480  {
481  polygon.vertices[j] = iter->second;
482  }
483  else
484  {
485  std::vector<int> kIndices;
486  std::vector<float> kDistances;
487  kdtree->radiusSearch(polygon.vertices[j], radius, kIndices, kDistances);
488  if(kIndices.size())
489  {
490  int reference = -1;
491  for(unsigned int z=0; z<kIndices.size(); ++z)
492  {
493  if(reference == -1)
494  {
495  reference = kIndices[z];
496  }
497  else if(keepLatestInRadius)
498  {
499  if(kIndices[z] < reference)
500  {
501  reference = kIndices[z];
502  }
503  }
504  else
505  {
506  if(kIndices[z] > reference)
507  {
508  reference = kIndices[z];
509  }
510  }
511  }
512  if(reference >= 0)
513  {
514  for(unsigned int z=0; z<kIndices.size(); ++z)
515  {
516  verticesDone.insert(std::make_pair(kIndices[z], reference)); // map every vertex found in the radius to the kept reference index
517  }
518  polygon.vertices[j] = reference;
519  }
520  }
521  else
522  {
523  verticesDone.insert(std::make_pair(polygon.vertices[j], polygon.vertices[j]));
524  }
525  }
526  }
527  }
528  return outputPolygons;
529 }
530 
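// Drops degenerate polygons, i.e. polygons that reference the same vertex twice in a row.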
531 std::vector<pcl::Vertices> filterInvalidPolygons(const std::vector<pcl::Vertices> & polygons)
532 {
533  std::vector<pcl::Vertices> output(polygons.size());
534  int oi=0;
535  for(unsigned int i=0; i<polygons.size(); ++i)
536  {
537  bool valid = true;
538  for(unsigned int j=0; j<polygons[i].vertices.size(); ++j)
539  {
540  if(polygons[i].vertices[j] == polygons[i].vertices[(j+1)%polygons[i].vertices.size()])
541  {
542  valid = false;
543  break;
544  }
545  }
546  if(valid)
547  {
548  output[oi++] = polygons[i];
549  }
550  }
551  output.resize(oi);
552  return output;
553 }
554 
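// Greedy projection triangulation (pcl::GreedyProjectionTriangulation) of a cloud that already has
// normals; points with NaN normals are removed first and a Kd-tree is used as the search method.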
555 pcl::PolygonMesh::Ptr createMesh(
556  const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloudWithNormals,
557  float gp3SearchRadius,
558  float gp3Mu,
559  int gp3MaximumNearestNeighbors,
560  float gp3MaximumSurfaceAngle,
561  float gp3MinimumAngle,
562  float gp3MaximumAngle,
563  bool gp3NormalConsistency)
564 {
565  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloudWithNormalsNoNaN = removeNaNNormalsFromPointCloud(cloudWithNormals);
566 
567  // Create search tree*
568  pcl::search::KdTree<pcl::PointXYZRGBNormal>::Ptr tree2 (new pcl::search::KdTree<pcl::PointXYZRGBNormal>);
569  tree2->setInputCloud (cloudWithNormalsNoNaN);
570 
571  // Initialize objects
572  pcl::GreedyProjectionTriangulation<pcl::PointXYZRGBNormal> gp3;
573  pcl::PolygonMesh::Ptr mesh(new pcl::PolygonMesh);
574 
575  // Set the maximum distance between connected points (maximum edge length)
576  gp3.setSearchRadius (gp3SearchRadius);
577 
578  // Set typical values for the parameters
579  gp3.setMu (gp3Mu);
580  gp3.setMaximumNearestNeighbors (gp3MaximumNearestNeighbors);
581  gp3.setMaximumSurfaceAngle(gp3MaximumSurfaceAngle); // 45 degrees
582  gp3.setMinimumAngle(gp3MinimumAngle); // 10 degrees
583  gp3.setMaximumAngle(gp3MaximumAngle); // 120 degrees
584  gp3.setNormalConsistency(gp3NormalConsistency);
585  gp3.setConsistentVertexOrdering(gp3NormalConsistency);
586 
587  // Get result
588  gp3.setInputCloud (cloudWithNormalsNoNaN);
589  gp3.setSearchMethod (tree2);
590  gp3.reconstruct (*mesh);
591 
592  //UASSERT(mesh->cloud.data.size()/mesh->cloud.point_step == cloudWithNormalsNoNaN->size());
593  //mesh->polygons = normalizePolygonsSide(*cloudWithNormalsNoNaN, mesh->polygons);
594 
595  return mesh;
596 }
597 
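// Builds the list of PCL texture-mapping cameras from node poses and camera models (one camera per
// sub-camera), attaching the optional ROI and per-camera depth image when available.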
598 pcl::texture_mapping::CameraVector createTextureCameras(
599  const std::map<int, Transform> & poses,
600  const std::map<int, std::vector<CameraModel> > & cameraModels,
601  const std::map<int, cv::Mat> & cameraDepths,
602  const std::vector<float> & roiRatios)
603 {
604  UASSERT(roiRatios.empty() || roiRatios.size() == 4);
605  pcl::texture_mapping::CameraVector cameras;
606 
607  for(std::map<int, Transform>::const_iterator poseIter=poses.begin(); poseIter!=poses.end(); ++poseIter)
608  {
609  std::map<int, std::vector<CameraModel> >::const_iterator modelIter=cameraModels.find(poseIter->first);
610 
611  if(modelIter!=cameraModels.end())
612  {
613  std::map<int, cv::Mat>::const_iterator depthIter = cameraDepths.find(poseIter->first);
614 
615  // for each sub camera
616  for(unsigned int i=0; i<modelIter->second.size(); ++i)
617  {
618  pcl::TextureMapping<pcl::PointXYZ>::Camera cam;
619  // should be in camera frame
620  UASSERT(!modelIter->second[i].localTransform().isNull() && !poseIter->second.isNull());
621  Transform t = poseIter->second*modelIter->second[i].localTransform();
622 
623  cam.pose = t.toEigen3f();
624 
625  if(modelIter->second[i].imageHeight() <=0 || modelIter->second[i].imageWidth() <=0)
626  {
627  UERROR("Should have camera models with width/height set to create texture cameras!");
629  }
630 
631  UASSERT(modelIter->second[i].fx()>0 && modelIter->second[i].imageHeight()>0 && modelIter->second[i].imageWidth()>0);
632  cam.focal_length_w=modelIter->second[i].fx();
633  cam.focal_length_h=modelIter->second[i].fy();
634  cam.center_w=modelIter->second[i].cx();
635  cam.center_h=modelIter->second[i].cy();
636  cam.height=modelIter->second[i].imageHeight();
637  cam.width=modelIter->second[i].imageWidth();
638  if(modelIter->second.size() == 1)
639  {
640  cam.texture_file = uFormat("%d", poseIter->first); // camera index
641  }
642  else
643  {
644  cam.texture_file = uFormat("%d_%d", poseIter->first, (int)i); // camera index, sub camera model index
645  }
646  if(!roiRatios.empty())
647  {
648  cam.roi.resize(4);
649  cam.roi[0] = cam.width * roiRatios[0]; // left -> x
650  cam.roi[1] = cam.height * roiRatios[2]; // top -> y
651  cam.roi[2] = cam.width * (1.0 - roiRatios[1]) - cam.roi[0]; // right -> width
652  cam.roi[3] = cam.height * (1.0 - roiRatios[3]) - cam.roi[1]; // bottom -> height
653  }
654 
655  if(depthIter != cameraDepths.end() && !depthIter->second.empty())
656  {
657  UASSERT(depthIter->second.type() == CV_32FC1 || depthIter->second.type() == CV_16UC1);
658  UASSERT(depthIter->second.cols % modelIter->second.size() == 0);
659  int subWidth = depthIter->second.cols/(modelIter->second.size());
660  cam.depth = cv::Mat(depthIter->second, cv::Range(0, depthIter->second.rows), cv::Range(subWidth*i, subWidth*(i+1)));
661  }
662 
663  UDEBUG("%f", cam.focal_length);
664  UDEBUG("%f", cam.height);
665  UDEBUG("%f", cam.width);
666  UDEBUG("cam.pose=%s", t.prettyPrint().c_str());
667 
668  cameras.push_back(cam);
669  }
670  }
671  }
672  return cameras;
673 }
674 
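// Creates a textured mesh by projecting the mesh faces into the given cameras. This overload wraps
// single camera models into vectors and calls the multi-camera version below.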
675 pcl::TextureMesh::Ptr createTextureMesh(
676  const pcl::PolygonMesh::Ptr & mesh,
677  const std::map<int, Transform> & poses,
678  const std::map<int, CameraModel> & cameraModels,
679  const std::map<int, cv::Mat> & cameraDepths,
680  float maxDistance,
681  float maxDepthError,
682  float maxAngle,
683  int minClusterSize,
684  const std::vector<float> & roiRatios,
685  const ProgressState * state,
686  std::vector<std::map<int, pcl::PointXY> > * vertexToPixels,
687  bool distanceToCamPolicy)
688 {
689  std::map<int, std::vector<CameraModel> > cameraSubModels;
690  for(std::map<int, CameraModel>::const_iterator iter=cameraModels.begin(); iter!=cameraModels.end(); ++iter)
691  {
692  std::vector<CameraModel> models;
693  models.push_back(iter->second);
694  cameraSubModels.insert(std::make_pair(iter->first, models));
695  }
696 
697  return createTextureMesh(
698  mesh,
699  poses,
700  cameraSubModels,
701  cameraDepths,
702  maxDistance,
703  maxDepthError,
704  maxAngle,
705  minClusterSize,
706  roiRatios,
707  state,
708  vertexToPixels,
709  distanceToCamPolicy);
710 }
711 
712 pcl::TextureMesh::Ptr createTextureMesh(
713  const pcl::PolygonMesh::Ptr & mesh,
714  const std::map<int, Transform> & poses,
715  const std::map<int, std::vector<CameraModel> > & cameraModels,
716  const std::map<int, cv::Mat> & cameraDepths,
717  float maxDistance,
718  float maxDepthError,
719  float maxAngle,
720  int minClusterSize,
721  const std::vector<float> & roiRatios,
722  const ProgressState * state,
723  std::vector<std::map<int, pcl::PointXY> > * vertexToPixels,
724  bool distanceToCamPolicy)
725 {
726  UASSERT(mesh->polygons.size());
727  pcl::TextureMesh::Ptr textureMesh(new pcl::TextureMesh);
728  textureMesh->cloud = mesh->cloud;
729  textureMesh->tex_polygons.push_back(mesh->polygons);
730 
731  // Original from pcl/gpu/kinfu_large_scale/tools/standalone_texture_mapping.cpp:
732  // Author: Raphael Favier, Technical University Eindhoven, (r.mysurname <aT> tue.nl)
733 
734  // Create the texturemesh object that will contain our UV-mapped mesh
735 
736  // create cameras
737  pcl::texture_mapping::CameraVector cameras = createTextureCameras(
738  poses,
739  cameraModels,
740  cameraDepths,
741  roiRatios);
742 
743  // Create materials for each texture (and one extra for occluded faces)
744  textureMesh->tex_materials.resize (cameras.size () + 1);
745  for(unsigned int i = 0 ; i <= cameras.size() ; ++i)
746  {
747  pcl::TexMaterial mesh_material;
748  mesh_material.tex_Ka.r = 0.2f;
749  mesh_material.tex_Ka.g = 0.2f;
750  mesh_material.tex_Ka.b = 0.2f;
751 
752  mesh_material.tex_Kd.r = 0.8f;
753  mesh_material.tex_Kd.g = 0.8f;
754  mesh_material.tex_Kd.b = 0.8f;
755 
756  mesh_material.tex_Ks.r = 1.0f;
757  mesh_material.tex_Ks.g = 1.0f;
758  mesh_material.tex_Ks.b = 1.0f;
759 
760  mesh_material.tex_d = 1.0f;
761  mesh_material.tex_Ns = 75.0f;
762  mesh_material.tex_illum = 2;
763 
764  std::stringstream tex_name;
765  tex_name << "material_" << i;
766  tex_name >> mesh_material.tex_name;
767 
768  if(i < cameras.size ())
769  {
770  mesh_material.tex_file = cameras[i].texture_file;
771  }
772  else
773  {
774  mesh_material.tex_file = "occluded";
775  }
776 
777  textureMesh->tex_materials[i] = mesh_material;
778  }
779 
780  // Texture by projection
781  pcl::TextureMapping<pcl::PointXYZ> tm; // TextureMapping object that will perform the sort
782  tm.setMaxDistance(maxDistance);
783  tm.setMaxAngle(maxAngle);
784  if(maxDepthError > 0.0f)
785  {
786  tm.setMaxDepthError(maxDepthError);
787  }
788  tm.setMinClusterSize(minClusterSize);
789  if(tm.textureMeshwithMultipleCameras2(*textureMesh, cameras, state, vertexToPixels, distanceToCamPolicy))
790  {
791  // compute normals for the mesh if not already here
792  bool hasNormals = false;
793  bool hasColors = false;
794  for(unsigned int i=0; i<textureMesh->cloud.fields.size(); ++i)
795  {
796  if(textureMesh->cloud.fields[i].name.compare("normal_x") == 0)
797  {
798  hasNormals = true;
799  }
800  else if(textureMesh->cloud.fields[i].name.compare("rgb") == 0)
801  {
802  hasColors = true;
803  }
804  }
805  if(!hasNormals)
806  {
807  // use polygons
808  if(hasColors)
809  {
810  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZRGBNormal>);
811  pcl::fromPCLPointCloud2(mesh->cloud, *cloud);
812 
813  for(unsigned int i=0; i<mesh->polygons.size(); ++i)
814  {
815  pcl::Vertices & v = mesh->polygons[i];
816  UASSERT(v.vertices.size()>2);
817  Eigen::Vector3f v0(
818  cloud->at(v.vertices[1]).x - cloud->at(v.vertices[0]).x,
819  cloud->at(v.vertices[1]).y - cloud->at(v.vertices[0]).y,
820  cloud->at(v.vertices[1]).z - cloud->at(v.vertices[0]).z);
821  int last = v.vertices.size()-1;
822  Eigen::Vector3f v1(
823  cloud->at(v.vertices[last]).x - cloud->at(v.vertices[0]).x,
824  cloud->at(v.vertices[last]).y - cloud->at(v.vertices[0]).y,
825  cloud->at(v.vertices[last]).z - cloud->at(v.vertices[0]).z);
826  Eigen::Vector3f normal = v0.cross(v1);
827  normal.normalize();
828  // flat normal (per face)
829  for(unsigned int j=0; j<v.vertices.size(); ++j)
830  {
831  cloud->at(v.vertices[j]).normal_x = normal[0];
832  cloud->at(v.vertices[j]).normal_y = normal[1];
833  cloud->at(v.vertices[j]).normal_z = normal[2];
834  }
835  }
836  pcl::toPCLPointCloud2 (*cloud, textureMesh->cloud);
837  }
838  else
839  {
840  pcl::PointCloud<pcl::PointNormal>::Ptr cloud (new pcl::PointCloud<pcl::PointNormal>);
841  pcl::fromPCLPointCloud2(mesh->cloud, *cloud);
842 
843  for(unsigned int i=0; i<mesh->polygons.size(); ++i)
844  {
845  pcl::Vertices & v = mesh->polygons[i];
846  UASSERT(v.vertices.size()>2);
847  Eigen::Vector3f v0(
848  cloud->at(v.vertices[1]).x - cloud->at(v.vertices[0]).x,
849  cloud->at(v.vertices[1]).y - cloud->at(v.vertices[0]).y,
850  cloud->at(v.vertices[1]).z - cloud->at(v.vertices[0]).z);
851  int last = v.vertices.size()-1;
852  Eigen::Vector3f v1(
853  cloud->at(v.vertices[last]).x - cloud->at(v.vertices[0]).x,
854  cloud->at(v.vertices[last]).y - cloud->at(v.vertices[0]).y,
855  cloud->at(v.vertices[last]).z - cloud->at(v.vertices[0]).z);
856  Eigen::Vector3f normal = v0.cross(v1);
857  normal.normalize();
858  // flat normal (per face)
859  for(unsigned int j=0; j<v.vertices.size(); ++j)
860  {
861  cloud->at(v.vertices[j]).normal_x = normal[0];
862  cloud->at(v.vertices[j]).normal_y = normal[1];
863  cloud->at(v.vertices[j]).normal_z = normal[2];
864  }
865  }
866  pcl::toPCLPointCloud2 (*cloud, textureMesh->cloud);
867  }
868  }
869  }
870  return textureMesh;
871 }
872 
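// Removes the occluded polygons (assumed to be the last material) from a texture mesh and optionally
// filters out small polygon clusters; with a negative minClusterSize only the largest cluster is kept.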
873 void cleanTextureMesh(
874  pcl::TextureMesh & textureMesh,
875  int minClusterSize)
876 {
877  UDEBUG("minClusterSize=%d", minClusterSize);
878  // Remove occluded polygons (polygons with no texture)
879  if(textureMesh.tex_coordinates.size())
880  {
881  // assume last texture is the occluded texture
882  textureMesh.tex_coordinates.pop_back();
883  textureMesh.tex_polygons.pop_back();
884  textureMesh.tex_materials.pop_back();
885 
886  if(minClusterSize!=0)
887  {
888  // concatenate all polygons
889  unsigned int totalSize = 0;
890  for(unsigned int t=0; t<textureMesh.tex_polygons.size(); ++t)
891  {
892  totalSize+=textureMesh.tex_polygons[t].size();
893  }
894  std::vector<pcl::Vertices> allPolygons(totalSize);
895  int oi=0;
896  for(unsigned int t=0; t<textureMesh.tex_polygons.size(); ++t)
897  {
898  for(unsigned int i=0; i<textureMesh.tex_polygons[t].size(); ++i)
899  {
900  allPolygons[oi++] = textureMesh.tex_polygons[t][i];
901  }
902  }
903 
904  // filter polygons
905  std::vector<std::set<int> > neighbors;
906  std::vector<std::set<int> > vertexToPolygons;
907  util3d::createPolygonIndexes(allPolygons,
908  (int)textureMesh.cloud.data.size()/textureMesh.cloud.point_step,
909  neighbors,
910  vertexToPolygons);
911 
912  std::list<std::list<int> > clusters = util3d::clusterPolygons(
913  neighbors,
914  minClusterSize<0?0:minClusterSize);
915 
916  std::set<int> validPolygons;
917  if(minClusterSize < 0)
918  {
919  // only keep the biggest cluster
920  std::list<std::list<int> >::iterator biggestClusterIndex = clusters.end();
921  unsigned int biggestClusterSize = 0;
922  for(std::list<std::list<int> >::iterator iter=clusters.begin(); iter!=clusters.end(); ++iter)
923  {
924  if(iter->size() > biggestClusterSize)
925  {
926  biggestClusterIndex = iter;
927  biggestClusterSize = iter->size();
928  }
929  }
930  if(biggestClusterIndex != clusters.end())
931  {
932  for(std::list<int>::iterator jter=biggestClusterIndex->begin(); jter!=biggestClusterIndex->end(); ++jter)
933  {
934  validPolygons.insert(*jter);
935  }
936  }
937  }
938  else
939  {
940  for(std::list<std::list<int> >::iterator iter=clusters.begin(); iter!=clusters.end(); ++iter)
941  {
942  for(std::list<int>::iterator jter=iter->begin(); jter!=iter->end(); ++jter)
943  {
944  validPolygons.insert(*jter);
945  }
946  }
947  }
948 
949  if(validPolygons.size() == 0)
950  {
951  UWARN("All %d polygons filtered after polygon cluster filtering. Cluster minimum size is %d.",totalSize, minClusterSize);
952  }
953 
954  // for each texture
955  unsigned int allPolygonsIndex = 0;
956  for(unsigned int t=0; t<textureMesh.tex_polygons.size(); ++t)
957  {
958  std::vector<pcl::Vertices> filteredPolygons(textureMesh.tex_polygons[t].size());
959 #if PCL_VERSION_COMPARE(>=, 1, 8, 0)
960  std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > filteredCoordinates(textureMesh.tex_coordinates[t].size());
961 #else
962  std::vector<Eigen::Vector2f> filteredCoordinates(textureMesh.tex_coordinates[t].size());
963 #endif
964 
965  if(textureMesh.tex_polygons[t].size())
966  {
967  UASSERT_MSG(allPolygonsIndex < allPolygons.size(), uFormat("%d vs %d", (int)allPolygonsIndex, (int)allPolygons.size()).c_str());
968 
969  // make index polygon to coordinate
970  std::vector<unsigned int> polygonToCoord(textureMesh.tex_polygons[t].size());
971  unsigned int totalCoord = 0;
972  for(unsigned int i=0; i<textureMesh.tex_polygons[t].size(); ++i)
973  {
974  polygonToCoord[i] = totalCoord;
975  totalCoord+=textureMesh.tex_polygons[t][i].vertices.size();
976  }
977  UASSERT_MSG(totalCoord == textureMesh.tex_coordinates[t].size(), uFormat("%d vs %d", totalCoord, (int)textureMesh.tex_coordinates[t].size()).c_str());
978 
979  int oi=0;
980  int ci=0;
981  for(unsigned int i=0; i<textureMesh.tex_polygons[t].size(); ++i)
982  {
983  if(validPolygons.find(allPolygonsIndex) != validPolygons.end())
984  {
985  filteredPolygons[oi] = textureMesh.tex_polygons[t].at(i);
986  for(unsigned int j=0; j<filteredPolygons[oi].vertices.size(); ++j)
987  {
988  UASSERT(polygonToCoord[i] < textureMesh.tex_coordinates[t].size());
989  filteredCoordinates[ci] = textureMesh.tex_coordinates[t][polygonToCoord[i]+j];
990  ++ci;
991  }
992  ++oi;
993  }
994  ++allPolygonsIndex;
995  }
996  filteredPolygons.resize(oi);
997  filteredCoordinates.resize(ci);
998  textureMesh.tex_polygons[t] = filteredPolygons;
999  textureMesh.tex_coordinates[t] = filteredCoordinates;
1000  }
1001  }
1002  }
1003  }
1004 }
1005 
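// Concatenates several texture meshes into one, merging materials that share the same texture file
// and offsetting polygon indices by the accumulated cloud size.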
1006 pcl::TextureMesh::Ptr concatenateTextureMeshes(const std::list<pcl::TextureMesh::Ptr> & meshes)
1007 {
1008  pcl::TextureMesh::Ptr output(new pcl::TextureMesh);
1009  std::map<std::string, int> addedMaterials; //<file, index>
1010  for(std::list<pcl::TextureMesh::Ptr>::const_iterator iter = meshes.begin(); iter!=meshes.end(); ++iter)
1011  {
1012  if((*iter)->cloud.point_step &&
1013  (*iter)->cloud.data.size()/(*iter)->cloud.point_step &&
1014  (*iter)->tex_polygons.size() &&
1015  (*iter)->tex_coordinates.size())
1016  {
1017  // append point cloud
1018  int polygonStep = output->cloud.height * output->cloud.width;
1019  pcl::PCLPointCloud2 tmp;
1020 #if PCL_VERSION_COMPARE(>=, 1, 10, 0)
1021  pcl::concatenate(output->cloud, iter->get()->cloud, tmp);
1022 #else
1023  pcl::concatenatePointCloud(output->cloud, iter->get()->cloud, tmp);
1024 #endif
1025  output->cloud = tmp;
1026 
1027  UASSERT((*iter)->tex_polygons.size() == (*iter)->tex_coordinates.size() &&
1028  (*iter)->tex_polygons.size() == (*iter)->tex_materials.size());
1029 
1030  int materialCount = (*iter)->tex_polygons.size();
1031  for(int i=0; i<materialCount; ++i)
1032  {
1033  std::map<std::string, int>::iterator jter = addedMaterials.find((*iter)->tex_materials[i].tex_file);
1034  int index;
1035  if(jter != addedMaterials.end())
1036  {
1037  index = jter->second;
1038  }
1039  else
1040  {
1041  addedMaterials.insert(std::make_pair((*iter)->tex_materials[i].tex_file, output->tex_materials.size()));
1042  index = output->tex_materials.size();
1043  output->tex_materials.push_back((*iter)->tex_materials[i]);
1044  output->tex_materials.back().tex_name = uFormat("material_%d", index);
1045  output->tex_polygons.resize(output->tex_polygons.size() + 1);
1046  output->tex_coordinates.resize(output->tex_coordinates.size() + 1);
1047  }
1048 
1049  // update and append polygon indices
1050  int oi = output->tex_polygons[index].size();
1051  output->tex_polygons[index].resize(output->tex_polygons[index].size() + (*iter)->tex_polygons[i].size());
1052  for(unsigned int j=0; j<(*iter)->tex_polygons[i].size(); ++j)
1053  {
1054  pcl::Vertices polygon = (*iter)->tex_polygons[i][j];
1055  for(unsigned int k=0; k<polygon.vertices.size(); ++k)
1056  {
1057  polygon.vertices[k] += polygonStep;
1058  }
1059  output->tex_polygons[index][oi+j] = polygon;
1060  }
1061 
1062  // append uv coordinates
1063  oi = output->tex_coordinates[index].size();
1064  output->tex_coordinates[index].resize(output->tex_coordinates[index].size() + (*iter)->tex_coordinates[i].size());
1065  for(unsigned int j=0; j<(*iter)->tex_coordinates[i].size(); ++j)
1066  {
1067  output->tex_coordinates[index][oi+j] = (*iter)->tex_coordinates[i][j];
1068  }
1069  }
1070  }
1071  }
1072  return output;
1073 }
1074 
1075 int gcd(int a, int b) {
1076  return b == 0 ? a : gcd(b, a % b);
1077 }
1078 
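// Packs all per-camera materials of a mesh into at most maxTextures texture atlases of
// textureSize x textureSize, rescaling the UV coordinates accordingly and reporting the scale used.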
1079 void concatenateTextureMaterials(pcl::TextureMesh & mesh, const cv::Size & imageSize, int textureSize, int maxTextures, float & scale, std::vector<bool> * materialsKept)
1080 {
1081  UASSERT(textureSize>0 && imageSize.width>0 && imageSize.height>0);
1082  if(maxTextures < 1)
1083  {
1084  maxTextures = 1;
1085  }
1086  int materials = 0;
1087  for(unsigned int i=0; i<mesh.tex_materials.size(); ++i)
1088  {
1089  if(mesh.tex_polygons.size())
1090  {
1091  ++materials;
1092  }
1093  }
1094  if(materials)
1095  {
1096  int w = imageSize.width; // 640
1097  int h = imageSize.height; // 480
1098  int g = gcd(w,h); // 160
1099  int a = w/g; // 4=640/160
1100  int b = h/g; // 3=480/160
1101  UDEBUG("w=%d h=%d g=%d a=%d b=%d", w, h, g, a, b);
1102  int colCount = 0;
1103  int rowCount = 0;
1104  float factor = 0.1f;
1105  float epsilon = 0.001f;
1106  scale = 1.0f;
1107  while((colCount*rowCount)*maxTextures < materials || (factor == 0.1f || scale > 1.0f))
1108  {
1109  // first run try scale = 1 (no scaling)
1110  if(factor!=0.1f)
1111  {
1112  scale = float(textureSize)/float(w*b*factor);
1113  }
1114  colCount = float(textureSize)/(scale*float(w));
1115  rowCount = float(textureSize)/(scale*float(h));
1116  factor+=epsilon; // search the maximum perfect fit
1117  }
1118  int outputTextures = (materials / (colCount*rowCount)) + (materials % (colCount*rowCount) > 0?1:0);
1119  UDEBUG("materials=%d col=%d row=%d output textures=%d factor=%f scale=%f", materials, colCount, rowCount, outputTextures, factor-epsilon, scale);
1120 
1121  UASSERT(mesh.tex_coordinates.size() == mesh.tex_materials.size() && mesh.tex_polygons.size() == mesh.tex_materials.size());
1122 
1123  // prepare size
1124  std::vector<int> totalPolygons(outputTextures, 0);
1125  std::vector<int> totalCoordinates(outputTextures, 0);
1126  int count = 0;
1127  for(unsigned int i=0; i<mesh.tex_materials.size(); ++i)
1128  {
1129  if(mesh.tex_polygons[i].size())
1130  {
1131  int indexMaterial = count / (colCount*rowCount);
1132  UASSERT(indexMaterial < outputTextures);
1133 
1134  totalPolygons[indexMaterial]+=mesh.tex_polygons[i].size();
1135  totalCoordinates[indexMaterial]+=mesh.tex_coordinates[i].size();
1136 
1137  ++count;
1138  }
1139  }
1140 
1141  pcl::TextureMesh outputMesh;
1142 
1143  int pi = 0;
1144  int ci = 0;
1145  int ti=0;
1146  float scaledHeight = float(int(scale*float(h)))/float(textureSize);
1147  float scaledWidth = float(int(scale*float(w)))/float(textureSize);
1148  float lowerBorderSize = 1.0f - scaledHeight*float(rowCount);
1149  UDEBUG("scaledWidth=%f scaledHeight=%f lowerBorderSize=%f", scaledWidth, scaledHeight, lowerBorderSize);
1150  if(materialsKept)
1151  {
1152  materialsKept->resize(mesh.tex_materials.size(), false);
1153  }
1154  for(unsigned int t=0; t<mesh.tex_materials.size(); ++t)
1155  {
1156  if(mesh.tex_polygons[t].size())
1157  {
1158  int indexMaterial = ti / (colCount*rowCount);
1159  UASSERT(indexMaterial < outputTextures);
1160  if((int)outputMesh.tex_polygons.size() <= indexMaterial)
1161  {
1162  std::vector<pcl::Vertices> newPolygons(totalPolygons[indexMaterial]);
1163 #if PCL_VERSION_COMPARE(>=, 1, 8, 0)
1164  std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > newCoordinates(totalCoordinates[indexMaterial]); // UV coordinates
1165 #else
1166  std::vector<Eigen::Vector2f> newCoordinates(totalCoordinates[indexMaterial]); // UV coordinates
1167 #endif
1168  outputMesh.tex_polygons.push_back(newPolygons);
1169  outputMesh.tex_coordinates.push_back(newCoordinates);
1170 
1171  pi=0;
1172  ci=0;
1173  }
1174 
1175  int row = (ti/colCount) % rowCount;
1176  int col = ti%colCount;
1177  float offsetU = scaledWidth * float(col);
1178  float offsetV = scaledHeight * float((rowCount - 1) - row) + lowerBorderSize;
1179  // Texture coords have lower-left origin
1180 
1181  for(unsigned int i=0; i<mesh.tex_polygons[t].size(); ++i)
1182  {
1183  UASSERT(pi < (int)outputMesh.tex_polygons[indexMaterial].size());
1184  outputMesh.tex_polygons[indexMaterial][pi++] = mesh.tex_polygons[t].at(i);
1185  }
1186 
1187  for(unsigned int i=0; i<mesh.tex_coordinates[t].size(); ++i)
1188  {
1189  const Eigen::Vector2f & v = mesh.tex_coordinates[t].at(i);
1190  if(v[0] >= 0 && v[1] >=0)
1191  {
1192  outputMesh.tex_coordinates[indexMaterial][ci][0] = v[0]*scaledWidth + offsetU;
1193  outputMesh.tex_coordinates[indexMaterial][ci][1] = v[1]*scaledHeight + offsetV;
1194  }
1195  else
1196  {
1197  outputMesh.tex_coordinates[indexMaterial][ci] = v;
1198  }
1199  ++ci;
1200  }
1201  ++ti;
1202  if(materialsKept)
1203  {
1204  materialsKept->at(t) = true;
1205  }
1206  }
1207  }
1208  pcl::TexMaterial m = mesh.tex_materials.front();
1209  mesh.tex_materials.clear();
1210  for(int i=0; i<outputTextures; ++i)
1211  {
1212  m.tex_file = "texture";
1213  m.tex_name = "material";
1214  if(outputTextures > 1)
1215  {
1216  m.tex_file += uNumber2Str(i);
1217  m.tex_name += uNumber2Str(i);
1218  }
1219 
1220  mesh.tex_materials.push_back(m);
1221  }
1222  mesh.tex_coordinates = outputMesh.tex_coordinates;
1223  mesh.tex_polygons = outputMesh.tex_polygons;
1224  }
1225 }
1226 
1227 std::vector<std::vector<RTABMAP_PCL_INDEX> > convertPolygonsFromPCL(const std::vector<pcl::Vertices> & polygons)
1228 {
1229  std::vector<std::vector<RTABMAP_PCL_INDEX> > polygonsOut(polygons.size());
1230  for(unsigned int p=0; p<polygons.size(); ++p)
1231  {
1232  polygonsOut[p] = polygons[p].vertices;
1233  }
1234  return polygonsOut;
1235 }
1236 std::vector<std::vector<std::vector<RTABMAP_PCL_INDEX> > > convertPolygonsFromPCL(const std::vector<std::vector<pcl::Vertices> > & tex_polygons)
1237 {
1238  std::vector<std::vector<std::vector<RTABMAP_PCL_INDEX> > > polygonsOut(tex_polygons.size());
1239  for(unsigned int t=0; t<tex_polygons.size(); ++t)
1240  {
1241  polygonsOut[t].resize(tex_polygons[t].size());
1242  for(unsigned int p=0; p<tex_polygons[t].size(); ++p)
1243  {
1244  polygonsOut[t][p] = tex_polygons[t][p].vertices;
1245  }
1246  }
1247  return polygonsOut;
1248 }
1249 std::vector<pcl::Vertices> convertPolygonsToPCL(const std::vector<std::vector<RTABMAP_PCL_INDEX> > & polygons)
1250 {
1251  std::vector<pcl::Vertices> polygonsOut(polygons.size());
1252  for(unsigned int p=0; p<polygons.size(); ++p)
1253  {
1254  polygonsOut[p].vertices = polygons[p];
1255  }
1256  return polygonsOut;
1257 }
1258 std::vector<std::vector<pcl::Vertices> > convertPolygonsToPCL(const std::vector<std::vector<std::vector<RTABMAP_PCL_INDEX> > > & tex_polygons)
1259 {
1260  std::vector<std::vector<pcl::Vertices> > polygonsOut(tex_polygons.size());
1261  for(unsigned int t=0; t<tex_polygons.size(); ++t)
1262  {
1263  polygonsOut[t].resize(tex_polygons[t].size());
1264  for(unsigned int p=0; p<tex_polygons[t].size(); ++p)
1265  {
1266  polygonsOut[t][p].vertices = tex_polygons[t][p];
1267  }
1268  }
1269  return polygonsOut;
1270 }
1271 
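// Rebuilds a pcl::TextureMesh from a serialized cloud (cv::Mat), polygon indices, UV coordinates and
// textures; when mergeTextures is true, the individual textures are packed into a single atlas.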
1272 pcl::TextureMesh::Ptr assembleTextureMesh(
1273  const cv::Mat & cloudMat,
1274  const std::vector<std::vector<std::vector<RTABMAP_PCL_INDEX> > > & polygons,
1275 #if PCL_VERSION_COMPARE(>=, 1, 8, 0)
1276  const std::vector<std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > > & texCoords,
1277 #else
1278  const std::vector<std::vector<Eigen::Vector2f> > & texCoords,
1279 #endif
1280  cv::Mat & textures,
1281  bool mergeTextures)
1282 {
1283  pcl::TextureMesh::Ptr textureMesh(new pcl::TextureMesh);
1284 
1285  if(cloudMat.channels() <= 3)
1286  {
1287  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = rtabmap::util3d::laserScanToPointCloud(LaserScan::backwardCompatibility(cloudMat));
1288  pcl::toPCLPointCloud2(*cloud, textureMesh->cloud);
1289  }
1290  else if(cloudMat.channels() == 4)
1291  {
1292  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudRGB(LaserScan::backwardCompatibility(cloudMat));
1293  pcl::toPCLPointCloud2(*cloud, textureMesh->cloud);
1294  }
1295  else if(cloudMat.channels() == 6)
1296  {
1297  pcl::PointCloud<pcl::PointNormal>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudNormal(LaserScan::backwardCompatibility(cloudMat));
1298  pcl::toPCLPointCloud2(*cloud, textureMesh->cloud);
1299  }
1300  else if(cloudMat.channels() == 7)
1301  {
1302  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudRGBNormal(LaserScan::backwardCompatibility(cloudMat));
1303  pcl::toPCLPointCloud2(*cloud, textureMesh->cloud);
1304  }
1305 
1306  if(textureMesh->cloud.data.size() && polygons.size())
1307  {
1308  textureMesh->tex_polygons.resize(polygons.size());
1309  for(unsigned int t=0; t<polygons.size(); ++t)
1310  {
1311  textureMesh->tex_polygons[t].resize(polygons[t].size());
1312  for(unsigned int p=0; p<polygons[t].size(); ++p)
1313  {
1314  textureMesh->tex_polygons[t][p].vertices = polygons[t][p];
1315  }
1316  }
1317 
1318  if(!texCoords.empty() && !textures.empty())
1319  {
1320  textureMesh->tex_coordinates = texCoords;
1321 
1322  textureMesh->tex_materials.resize (textureMesh->tex_coordinates.size());
1323  for(unsigned int i = 0 ; i < textureMesh->tex_coordinates.size() ; ++i)
1324  {
1325  pcl::TexMaterial mesh_material;
1326  mesh_material.tex_Ka.r = 0.2f;
1327  mesh_material.tex_Ka.g = 0.2f;
1328  mesh_material.tex_Ka.b = 0.2f;
1329 
1330  mesh_material.tex_Kd.r = 0.8f;
1331  mesh_material.tex_Kd.g = 0.8f;
1332  mesh_material.tex_Kd.b = 0.8f;
1333 
1334  mesh_material.tex_Ks.r = 1.0f;
1335  mesh_material.tex_Ks.g = 1.0f;
1336  mesh_material.tex_Ks.b = 1.0f;
1337 
1338  mesh_material.tex_d = 1.0f;
1339  mesh_material.tex_Ns = 75.0f;
1340  mesh_material.tex_illum = 2;
1341 
1342  std::stringstream tex_name;
1343  tex_name << "material_" << i;
1344  tex_name >> mesh_material.tex_name;
1345 
1346  mesh_material.tex_file = uFormat("%d", i);
1347 
1348  textureMesh->tex_materials[i] = mesh_material;
1349  }
1350 
1351  if(mergeTextures && textures.cols/textures.rows > 1)
1352  {
1353  UASSERT(textures.cols % textures.rows == 0 && textures.cols/textures.rows == (int)textureMesh->tex_coordinates.size());
1354  std::vector<bool> materialsKept;
1355  float scale = 0.0f;
1356  cv::Size imageSize(textures.rows, textures.rows);
1357  int imageType = textures.type();
1358  rtabmap::util3d::concatenateTextureMaterials(*textureMesh, imageSize, textures.rows, 1, scale, &materialsKept);
1359  if(scale && textureMesh->tex_materials.size() == 1)
1360  {
1361  int cols = float(textures.rows)/(scale*imageSize.width);
1362  int rows = float(textures.rows)/(scale*imageSize.height);
1363 
1364  cv::Mat mergedTextures = cv::Mat(textures.rows, textures.rows, imageType, cv::Scalar::all(255));
1365 
1366  // make a blank texture
1367  cv::Size resizedImageSize(int(imageSize.width*scale), int(imageSize.height*scale));
1368  int oi=0;
1369  for(int i=0; i<(int)materialsKept.size(); ++i)
1370  {
1371  if(materialsKept.at(i))
1372  {
1373  int u = oi%cols * resizedImageSize.width;
1374  int v = ((oi/cols) % rows ) * resizedImageSize.height;
1375  UASSERT(u < textures.rows-resizedImageSize.width);
1376  UASSERT(v < textures.rows-resizedImageSize.height);
1377 
1378  cv::Mat resizedImage;
1379  cv::resize(textures(cv::Range::all(), cv::Range(i*textures.rows, (i+1)*textures.rows)), resizedImage, resizedImageSize, 0.0f, 0.0f, cv::INTER_AREA);
1380 
1381  UASSERT(resizedImage.type() == mergedTextures.type());
1382  resizedImage.copyTo(mergedTextures(cv::Rect(u, v, resizedImage.cols, resizedImage.rows)));
1383 
1384  ++oi;
1385  }
1386  }
1387  textures = mergedTextures;
1388  }
1389  }
1390  }
1391  }
1392  return textureMesh;
1393 }
1394 
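// Rebuilds a pcl::PolygonMesh from a serialized cloud (cv::Mat) and polygon indices.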
1395 pcl::PolygonMesh::Ptr assemblePolygonMesh(
1396  const cv::Mat & cloudMat,
1397  const std::vector<std::vector<RTABMAP_PCL_INDEX> > & polygons)
1398 {
1399  pcl::PolygonMesh::Ptr polygonMesh(new pcl::PolygonMesh);
1400 
1401  if(cloudMat.channels() <= 3)
1402  {
1403  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = rtabmap::util3d::laserScanToPointCloud(LaserScan::backwardCompatibility(cloudMat));
1404  pcl::toPCLPointCloud2(*cloud, polygonMesh->cloud);
1405  }
1406  else if(cloudMat.channels() == 4)
1407  {
1408  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudRGB(LaserScan::backwardCompatibility(cloudMat));
1409  pcl::toPCLPointCloud2(*cloud, polygonMesh->cloud);
1410  }
1411  else if(cloudMat.channels() == 6)
1412  {
1413  pcl::PointCloud<pcl::PointNormal>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudNormal(LaserScan::backwardCompatibility(cloudMat));
1414  pcl::toPCLPointCloud2(*cloud, polygonMesh->cloud);
1415  }
1416  else if(cloudMat.channels() == 7)
1417  {
1418  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudRGBNormal(LaserScan::backwardCompatibility(cloudMat));
1419  pcl::toPCLPointCloud2(*cloud, polygonMesh->cloud);
1420  }
1421 
1422  if(polygonMesh->cloud.data.size() && polygons.size())
1423  {
1424  polygonMesh->polygons.resize(polygons.size());
1425  for(unsigned int p=0; p<polygons.size(); ++p)
1426  {
1427  polygonMesh->polygons[p].vertices = polygons[p];
1428  }
1429  }
1430  return polygonMesh;
1431 }
1432 
1433 double sqr(uchar v)
1434 {
1435  return double(v)*double(v);
1436 }
1437 
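// Assembles the global texture(s) of a texture mesh from node images (provided map, Memory or DBDriver),
// with optional gain compensation, blending and brightness/contrast adjustment.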
1438 cv::Mat mergeTextures(
1439  pcl::TextureMesh & mesh,
1440  const std::map<int, cv::Mat> & images,
1441  const std::map<int, CameraModel> & calibrations,
1442  const Memory * memory,
1443  const DBDriver * dbDriver,
1444  int textureSize,
1445  int textureCount,
1446  const std::vector<std::map<int, pcl::PointXY> > & vertexToPixels,
1447  bool gainCompensation,
1448  float gainBeta,
1449  bool gainRGB,
1450  bool blending,
1451  int blendingDecimation,
1452  int brightnessContrastRatioLow,
1453  int brightnessContrastRatioHigh,
1454  bool exposureFusion,
1455  const ProgressState * state,
1456  unsigned char blankValue,
1457  std::map<int, std::map<int, cv::Vec4d> > * gains,
1458  std::map<int, std::map<int, cv::Mat> > * blendingGains,
1459  std::pair<float, float> * contrastValues)
1460 {
1461  std::map<int, std::vector<CameraModel> > calibVectors;
1462  for(std::map<int, CameraModel>::const_iterator iter=calibrations.begin(); iter!=calibrations.end(); ++iter)
1463  {
1464  std::vector<CameraModel> m;
1465  m.push_back(iter->second);
1466  calibVectors.insert(std::make_pair(iter->first, m));
1467  }
1468  return mergeTextures(mesh,
1469  images,
1470  calibVectors,
1471  memory,
1472  dbDriver,
1473  textureSize,
1474  textureCount,
1475  vertexToPixels,
1476  gainCompensation,
1477  gainBeta,
1478  gainRGB,
1479  blending,
1480  blendingDecimation,
1481  brightnessContrastRatioLow,
1482  brightnessContrastRatioHigh,
1483  exposureFusion,
1484  state,
1485  blankValue,
1486  gains,
1487  blendingGains,
1488  contrastValues);
1489 }
1490 cv::Mat mergeTextures(
1491  pcl::TextureMesh & mesh,
1492  const std::map<int, cv::Mat> & images,
1493  const std::map<int, std::vector<CameraModel> > & calibrations,
1494  const Memory * memory,
1495  const DBDriver * dbDriver,
1496  int textureSize,
1497  int textureCount,
1498  const std::vector<std::map<int, pcl::PointXY> > & vertexToPixels,
1499  bool gainCompensation,
1500  float gainBeta,
1501  bool gainRGB,
1502  bool blending,
1503  int blendingDecimation,
1504  int brightnessContrastRatioLow,
1505  int brightnessContrastRatioHigh,
1506  bool exposureFusion,
1507  const ProgressState * state,
1508  unsigned char blankValue,
1509  std::map<int, std::map<int, cv::Vec4d> > * gainsOut,
1510  std::map<int, std::map<int, cv::Mat> > * blendingGainsOut,
1511  std::pair<float, float> * contrastValuesOut)
1512 {
1513  //get texture size, if disabled use default 1024
1514  UASSERT(textureSize%256 == 0);
1515  UDEBUG("textureSize = %d", textureSize);
1516  cv::Mat globalTextures;
1517  if(!mesh.tex_materials.empty())
1518  {
1519  std::vector<std::pair<int, int> > textures(mesh.tex_materials.size(), std::pair<int, int>(-1,0));
1520  cv::Size imageSize;
1521  const int imageType=CV_8UC3;
1522 
1523  UDEBUG("");
1524  for(unsigned int i=0; i<mesh.tex_materials.size(); ++i)
1525  {
1526  std::list<std::string> texFileSplit = uSplit(mesh.tex_materials[i].tex_file, '_');
1527  if(!mesh.tex_materials[i].tex_file.empty() &&
1528  mesh.tex_polygons[i].size() &&
1529  uIsInteger(texFileSplit.front(), false))
1530  {
1531  textures[i].first = uStr2Int(texFileSplit.front());
1532  if(texFileSplit.size() == 2 &&
1533  uIsInteger(texFileSplit.back(), false) )
1534  {
1535  textures[i].second = uStr2Int(texFileSplit.back());
1536  }
1537 
1538  int textureId = textures[i].first;
1539  if(imageSize.width == 0 || imageSize.height == 0)
1540  {
1541  if(images.find(textureId) != images.end() &&
1542  !images.find(textureId)->second.empty() &&
1543  calibrations.find(textureId) != calibrations.end())
1544  {
1545  const std::vector<CameraModel> & models = calibrations.find(textureId)->second;
1546  UASSERT(models.size()>=1);
1547  if( models[0].imageHeight()>0 &&
1548  models[0].imageWidth()>0)
1549  {
1550  imageSize = models[0].imageSize();
1551  }
1552  else if(images.find(textureId)!=images.end())
1553  {
1554  // backward compatibility for image size not set in CameraModel
1555  cv::Mat image = images.find(textureId)->second;
1556  if(image.rows == 1 && image.type() == CV_8UC1)
1557  {
1558  image = uncompressImage(image);
1559  }
1560  UASSERT(!image.empty());
1561  imageSize = image.size();
1562  if(models.size()>1)
1563  {
1564  imageSize.width/=models.size();
1565  }
1566  }
1567  }
1568  else if(memory)
1569  {
1570  SensorData data = memory->getNodeData(textureId, true, false, false, false);
1571  const std::vector<CameraModel> & models = data.cameraModels();
1572  const std::vector<StereoCameraModel> & stereoModels = data.stereoCameraModels();
1573  if(models.size()>=1 &&
1574  models[0].imageHeight()>0 &&
1575  models[0].imageWidth()>0)
1576  {
1577  imageSize = models[0].imageSize();
1578  }
1579  else if(stereoModels.size()>=1 &&
1580  stereoModels[0].left().imageHeight() > 0 &&
1581  stereoModels[0].left().imageWidth() > 0)
1582  {
1583  imageSize = stereoModels[0].left().imageSize();
1584  }
1585  else // backward compatibility for image size not set in CameraModel
1586  {
1587  cv::Mat image;
1588  data.uncompressDataConst(&image, 0);
1589  UASSERT(!image.empty());
1590  imageSize = image.size();
1591  if(data.cameraModels().size()>1)
1592  {
1593  imageSize.width/=data.cameraModels().size();
1594  }
1595  }
1596  }
1597  else if(dbDriver)
1598  {
1599  std::vector<CameraModel> models;
1600  std::vector<StereoCameraModel> stereoModels;
1601  dbDriver->getCalibration(textureId, models, stereoModels);
1602  if(models.size()>=1 &&
1603  models[0].imageHeight()>0 &&
1604  models[0].imageWidth()>0)
1605  {
1606  imageSize = models[0].imageSize();
1607  }
1608  else if(stereoModels.size()>=1 &&
1609  stereoModels[0].left().imageHeight() > 0 &&
1610  stereoModels[0].left().imageWidth() > 0)
1611  {
1612  imageSize = stereoModels[0].left().imageSize();
1613  }
1614  else // backward compatibility for image size not set in CameraModel
1615  {
1616  SensorData data;
1617  dbDriver->getNodeData(textureId, data, true, false, false, false);
1618  cv::Mat image;
1619  data.uncompressDataConst(&image, 0);
1620  UASSERT(!image.empty());
1621  imageSize = image.size();
1622  if(data.cameraModels().size()>1)
1623  {
1624  imageSize.width/=data.cameraModels().size();
1625  }
1626  }
1627  }
1628  }
1629  }
1630  else if(mesh.tex_polygons[i].size() && mesh.tex_materials[i].tex_file.compare("occluded")!=0)
1631  {
1632  UWARN("Failed parsing texture file name: %s", mesh.tex_materials[i].tex_file.c_str());
1633  }
1634  }
1635  UDEBUG("textures=%d imageSize=%dx%d", (int)textures.size(), imageSize.height, imageSize.width);
1636  if(textures.size() && imageSize.height>0 && imageSize.width>0)
1637  {
1638  float scale = 0.0f;
1639  UDEBUG("");
1640  std::vector<bool> materialsKept;
1641  util3d::concatenateTextureMaterials(mesh, imageSize, textureSize, textureCount, scale, &materialsKept);
1642  if(scale && mesh.tex_materials.size())
1643  {
1644  int materials = (int)mesh.tex_materials.size();
1645  int cols = float(textureSize)/(scale*imageSize.width);
1646  int rows = float(textureSize)/(scale*imageSize.height);
1647 
1648  globalTextures = cv::Mat(textureSize, materials*textureSize, imageType, cv::Scalar::all(blankValue));
1649  cv::Mat globalTextureMasks = cv::Mat(textureSize, materials*textureSize, CV_8UC1, cv::Scalar::all(0));
1650 
1651  // used for multi camera texturing, to avoid reloading same texture for sub cameras
1652  cv::Mat previousImage;
1653  int previousTextureId = 0;
1654  std::vector<CameraModel> previousCameraModels;
1655 
1656  // make a blank texture
1657  cv::Mat emptyImage(int(imageSize.height*scale), int(imageSize.width*scale), imageType, cv::Scalar::all(blankValue));
1658  cv::Mat emptyImageMask(int(imageSize.height*scale), int(imageSize.width*scale), CV_8UC1, cv::Scalar::all(255));
1659  int oi=0;
1660  std::vector<cv::Point2i> imageOrigin(textures.size());
1661  std::vector<int> newCamIndex(textures.size(), -1);
1662  for(int t=0; t<(int)textures.size(); ++t)
1663  {
1664  if(materialsKept.at(t))
1665  {
1666  int indexMaterial = oi / (cols*rows);
1667  UASSERT(indexMaterial < materials);
1668 
1669  newCamIndex[t] = oi;
1670  int u = oi%cols * emptyImage.cols;
1671  int v = ((oi/cols) % rows ) * emptyImage.rows;
1672  UASSERT_MSG(u < textureSize-emptyImage.cols, uFormat("u=%d textureSize=%d emptyImage.cols=%d", u, textureSize, emptyImage.cols).c_str());
1673  UASSERT_MSG(v < textureSize-emptyImage.rows, uFormat("v=%d textureSize=%d emptyImage.rows=%d", v, textureSize, emptyImage.rows).c_str());
1674  imageOrigin[t].x = u;
1675  imageOrigin[t].y = v;
1676  if(textures[t].first>=0)
1677  {
1678  cv::Mat image;
1679  std::vector<CameraModel> models;
1680 
1681  if(textures[t].first == previousTextureId)
1682  {
1683  image = previousImage;
1684  models = previousCameraModels;
1685  }
1686  else
1687  {
1688  if(images.find(textures[t].first) != images.end() &&
1689  !images.find(textures[t].first)->second.empty() &&
1690  calibrations.find(textures[t].first) != calibrations.end())
1691  {
1692  image = images.find(textures[t].first)->second;
1693  if(image.rows == 1 && image.type() == CV_8UC1)
1694  {
1695  image = uncompressImage(image);
1696  }
1697  models = calibrations.find(textures[t].first)->second;
1698  }
1699  else if(memory)
1700  {
1701  SensorData data = memory->getNodeData(textures[t].first, true, false, false, false);
1702  models = data.cameraModels();
1703  if(models.empty() && !data.stereoCameraModels().empty())
1704  {
1705  for(size_t i=0; i<data.stereoCameraModels().size(); ++i)
1706  {
1707  models.push_back(data.stereoCameraModels()[i].left());
1708  }
1709  }
1710  data.uncompressDataConst(&image, 0);
1711  }
1712  else if(dbDriver)
1713  {
1714  SensorData data;
1715  dbDriver->getNodeData(textures[t].first, data, true, false, false, false);
1716  data.uncompressDataConst(&image, 0);
1717  std::vector<StereoCameraModel> stereoModels;
1718  dbDriver->getCalibration(textures[t].first, models, stereoModels);
1719  if(models.empty() && !stereoModels.empty())
1720  {
1721  for(size_t i=0; i<stereoModels.size(); ++i)
1722  {
1723  models.push_back(stereoModels[i].left());
1724  }
1725  }
1726  }
1727 
1728  previousImage = image;
1729  previousCameraModels = models;
1730  previousTextureId = textures[t].first;
1731  }
1732 
1733  UASSERT(!image.empty());
1734 
1735  if(textures[t].second>=0)
1736  {
1737  UASSERT(textures[t].second < (int)models.size());
1738  int width = image.cols/models.size();
1739  image = image.colRange(width*textures[t].second, width*(textures[t].second+1));
1740  }
1741 
1742  cv::Mat resizedImage;
1743  cv::resize(image, resizedImage, emptyImage.size(), 0.0f, 0.0f, cv::INTER_AREA);
1744  UASSERT(resizedImage.type() == CV_8UC1 || resizedImage.type() == CV_8UC3);
1745  if(resizedImage.type() == CV_8UC1)
1746  {
1747  cv::Mat resizedImageColor;
1748  cv::cvtColor(resizedImage, resizedImageColor, CV_GRAY2BGR);
1749  resizedImage = resizedImageColor;
1750  }
1751  UASSERT(resizedImage.type() == globalTextures.type());
1752  resizedImage.copyTo(globalTextures(cv::Rect(u+indexMaterial*globalTextures.rows, v, resizedImage.cols, resizedImage.rows)));
1753  emptyImageMask.copyTo(globalTextureMasks(cv::Rect(u+indexMaterial*globalTextureMasks.rows, v, resizedImage.cols, resizedImage.rows)));
1754  }
1755  else
1756  {
1757  emptyImage.copyTo(globalTextures(cv::Rect(u+indexMaterial*globalTextures.rows, v, emptyImage.cols, emptyImage.rows)));
1758  }
1759  ++oi;
1760  }
1761 
1762  if(state)
1763  {
1764  if(state->isCanceled())
1765  {
1766  return cv::Mat();
1767  }
1768  state->callback(uFormat("Assembled texture %d/%d.", t+1, (int)textures.size()));
1769  }
1770  }
1771 
1772  UTimer timer;
1773  if(vertexToPixels.size())
1774  {
1775  //UWARN("Saving original.png", globalTexture);
1776  //cv::imwrite("original.png", globalTexture);
1777 
1778  if(gainCompensation)
1779  {
1779  {
1784  const int num_images = static_cast<int>(oi);
1785  cv::Mat_<int> N(num_images, num_images); N.setTo(0);
1786  cv::Mat_<double> I(num_images, num_images); I.setTo(0);
1787 
1788  cv::Mat_<double> IR(num_images, num_images); IR.setTo(0);
1789  cv::Mat_<double> IG(num_images, num_images); IG.setTo(0);
1790  cv::Mat_<double> IB(num_images, num_images); IB.setTo(0);
1791 
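 // For every pair of cameras (i, j) seeing the same mesh vertex:
 //  - N(i,j) counts the shared vertices,
 //  - I(i,j) accumulates the intensity (norm of the BGR vector) sampled by camera i at those vertices,
 //  - IR/IG/IB accumulate the per-channel values (used when gainRGB is true).
 // The accumulated sums are turned into means right after this loop.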
1792  // Adjust UV coordinates to globalTexture
1793  for(unsigned int p=0; p<vertexToPixels.size(); ++p)
1794  {
1795  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[p].begin(); iter!=vertexToPixels[p].end(); ++iter)
1796  {
1797  if(materialsKept.at(iter->first))
1798  {
1799  N(newCamIndex[iter->first], newCamIndex[iter->first]) +=1;
1800 
1801  std::map<int, pcl::PointXY>::const_iterator jter=iter;
1802  ++jter;
1803  int k = 1;
1804  for(; jter!=vertexToPixels[p].end(); ++jter, ++k)
1805  {
1806  if(materialsKept.at(jter->first))
1807  {
1808  int i = newCamIndex[iter->first];
1809  int j = newCamIndex[jter->first];
1810 
1811  N(i, j) += 1;
1812  N(j, i) += 1;
1813 
1814  int indexMaterial = i / (cols*rows);
1815 
1816  // uv in globalTexture
1817  int ui = iter->second.x*emptyImage.cols + imageOrigin[iter->first].x;
1818  int vi = (1.0-iter->second.y)*emptyImage.rows + imageOrigin[iter->first].y;
1819  int uj = jter->second.x*emptyImage.cols + imageOrigin[jter->first].x;
1820  int vj = (1.0-jter->second.y)*emptyImage.rows + imageOrigin[jter->first].y;
1821  cv::Vec3b * pt1 = globalTextures.ptr<cv::Vec3b>(vi,ui+indexMaterial*globalTextures.rows);
1822  cv::Vec3b * pt2 = globalTextures.ptr<cv::Vec3b>(vj,uj+indexMaterial*globalTextures.rows);
1823 
1824  I(i, j) += std::sqrt(static_cast<double>(sqr(pt1->val[0]) + sqr(pt1->val[1]) + sqr(pt1->val[2])));
1825  I(j, i) += std::sqrt(static_cast<double>(sqr(pt2->val[0]) + sqr(pt2->val[1]) + sqr(pt2->val[2])));
1826 
1827  IR(i, j) += static_cast<double>(pt1->val[2]);
1828  IR(j, i) += static_cast<double>(pt2->val[2]);
1829  IG(i, j) += static_cast<double>(pt1->val[1]);
1830  IG(j, i) += static_cast<double>(pt2->val[1]);
1831  IB(i, j) += static_cast<double>(pt1->val[0]);
1832  IB(j, i) += static_cast<double>(pt2->val[0]);
1833  }
1834  }
1835  }
1836  }
1837  }
1838 
1839  for(int i=0; i<num_images; ++i)
1840  {
1841  for(int j=i; j<num_images; ++j)
1842  {
1843  if(i == j)
1844  {
1845  if(N(i,j) == 0)
1846  {
1847  N(i,j) = 1;
1848  }
1849  }
1850  else if(N(i, j))
1851  {
1852  I(i, j) /= N(i, j);
1853  I(j, i) /= N(j, i);
1854 
1855  IR(i, j) /= N(i, j);
1856  IR(j, i) /= N(j, i);
1857  IG(i, j) /= N(i, j);
1858  IG(j, i) /= N(j, i);
1859  IB(i, j) /= N(i, j);
1860  IB(j, i) /= N(j, i);
1861  }
1862  }
1863  }
1864 
1865  cv::Mat_<double> A(num_images, num_images); A.setTo(0);
1866  cv::Mat_<double> b(num_images, 1); b.setTo(0);
1867  cv::Mat_<double> AR(num_images, num_images); AR.setTo(0);
1868  cv::Mat_<double> AG(num_images, num_images); AG.setTo(0);
1869  cv::Mat_<double> AB(num_images, num_images); AB.setTo(0);
1870  double alpha = 0.01;
1871  double beta = gainBeta;
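 // Per-camera gains g_i are obtained by (approximately) minimizing
 //   sum_{i,j} N(i,j) * ( alpha*(g_i*I(i,j) - g_j*I(j,i))^2 + beta*(1 - g_i)^2 ),
 // which yields the linear system A*g = b built below (same formulation as OpenCV's stitching
 // GainCompensator). AR/AG/AB build the same system per color channel.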
1872  for (int i = 0; i < num_images; ++i)
1873  {
1874  for (int j = 0; j < num_images; ++j)
1875  {
1876  b(i, 0) += beta * N(i, j);
1877  A(i, i) += beta * N(i, j);
1878  AR(i, i) += beta * N(i, j);
1879  AG(i, i) += beta * N(i, j);
1880  AB(i, i) += beta * N(i, j);
1881  if (j == i) continue;
1882  A(i, i) += 2 * alpha * I(i, j) * I(i, j) * N(i, j);
1883  A(i, j) -= 2 * alpha * I(i, j) * I(j, i) * N(i, j);
1884 
1885  AR(i, i) += 2 * alpha * IR(i, j) * IR(i, j) * N(i, j);
1886  AR(i, j) -= 2 * alpha * IR(i, j) * IR(j, i) * N(i, j);
1887 
1888  AG(i, i) += 2 * alpha * IG(i, j) * IG(i, j) * N(i, j);
1889  AG(i, j) -= 2 * alpha * IG(i, j) * IG(j, i) * N(i, j);
1890 
1891  AB(i, i) += 2 * alpha * IB(i, j) * IB(i, j) * N(i, j);
1892  AB(i, j) -= 2 * alpha * IB(i, j) * IB(j, i) * N(i, j);
1893  }
1894  }
1895 
1896  cv::Mat_<double> gainsGray, gainsR, gainsG, gainsB;
1897  cv::solve(A, b, gainsGray);
1898 
1899  cv::solve(AR, b, gainsR);
1900  cv::solve(AG, b, gainsG);
1901  cv::solve(AB, b, gainsB);
1902 
1903  cv::Mat_<double> gains(gainsGray.rows, 4);
1904  gainsGray.copyTo(gains.col(0));
1905  gainsR.copyTo(gains.col(1));
1906  gainsG.copyTo(gains.col(2));
1907  gainsB.copyTo(gains.col(3));
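 // gains columns: 0 = gray-level gain, 1 = R, 2 = G, 3 = B. With gainRGB enabled, the per-channel
 // gains are applied to the BGR channels below, hence the reversed 3/2/1 column indices.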
1908 
1909  for(int t=0; t<(int)textures.size(); ++t)
1910  {
1911  //break;
1912  if(materialsKept.at(t))
1913  {
1914  int u = imageOrigin[t].x;
1915  int v = imageOrigin[t].y;
1916 
1917  UDEBUG("Gain cam%d = %f", newCamIndex[t], gainsGray(newCamIndex[t], 0));
1918 
1919  int indexMaterial = newCamIndex[t] / (cols*rows);
1920  cv::Mat roi = globalTextures(cv::Rect(u+indexMaterial*globalTextures.rows, v, emptyImage.cols, emptyImage.rows));
1921 
1922  std::vector<cv::Mat> channels;
1923  cv::split(roi, channels);
1924 
1925  // assuming BGR
1926  cv::multiply(channels[0], gains(newCamIndex[t], gainRGB?3:0), channels[0]);
1927  cv::multiply(channels[1], gains(newCamIndex[t], gainRGB?2:0), channels[1]);
1928  cv::multiply(channels[2], gains(newCamIndex[t], gainRGB?1:0), channels[2]);
1929 
1930  cv::merge(channels, roi);
1931 
1932  if(gainsOut)
1933  {
1934  cv::Vec4d g(
1935  gains(newCamIndex[t], 0),
1936  gains(newCamIndex[t], 1),
1937  gains(newCamIndex[t], 2),
1938  gains(newCamIndex[t], 3));
1939  if(gainsOut->find(textures[t].first) == gainsOut->end())
1940  {
1941  std::map<int,cv::Vec4d> value;
1942  value.insert(std::make_pair(textures[t].second, g));
1943  gainsOut->insert(std::make_pair(textures[t].first, value));
1944  }
1945  else
1946  {
1947  gainsOut->at(textures[t].first).insert(std::make_pair(textures[t].second, g));
1948  }
1949  }
1950  }
1951  }
1952  //UWARN("Saving gain.png", globalTexture);
1953  //cv::imwrite("gain.png", globalTexture);
1954  if(state) state->callback(uFormat("Gain compensation %fs", timer.ticks()));
1955  }
1956 
1957  if(blending)
1958  {
1959  // blending BGR
1960  int decimation = 1;
1961  if(blendingDecimation <= 0)
1962  {
1963  // determine decimation to apply
1964  std::vector<float> edgeLengths;
1965  if(mesh.tex_coordinates.size() && mesh.tex_coordinates[0].size())
1966  {
1967  UASSERT(mesh.tex_polygons.size() && mesh.tex_polygons[0].size() && mesh.tex_polygons[0][0].vertices.size());
1968  int polygonSize = mesh.tex_polygons[0][0].vertices.size();
1969  UDEBUG("polygon size=%d", polygonSize);
1970 
1971  for(unsigned int k=0; k<mesh.tex_coordinates.size(); ++k)
1972  {
1973  for(unsigned int i=0; i<mesh.tex_coordinates[k].size(); i+=polygonSize)
1974  {
1975  for(int j=0; j<polygonSize; ++j)
1976  {
1977  const Eigen::Vector2f & uc1 = mesh.tex_coordinates[k][i + j];
1978  const Eigen::Vector2f & uc2 = mesh.tex_coordinates[k][i + (j+1)%polygonSize];
1979  Eigen::Vector2f edge = (uc1-uc2)*textureSize;
1980  edgeLengths.push_back(fabs(edge[0]));
1981  edgeLengths.push_back(fabs(edge[1]));
1982  }
1983  }
1984  }
1985  float edgeLength = 0.0f;
1986  if(edgeLengths.size())
1987  {
1988  std::sort(edgeLengths.begin(), edgeLengths.end());
1989  float m = uMean(edgeLengths.data(), edgeLengths.size());
1990  float stddev = std::sqrt(uVariance(edgeLengths.data(), edgeLengths.size(), m));
1991  edgeLength = m+stddev;
1992  decimation = 1 << 6;
1993  for(int i=1; i<=6; ++i)
1994  {
1995  if(float(1 << i) >= edgeLength)
1996  {
1997  decimation = 1 << i;
1998  break;
1999  }
2000  }
2001  }
2002 
2003  UDEBUG("edge length=%f decimation=%d", edgeLength, decimation);
2004  }
2005  }
2006  else
2007  {
2008  if(blendingDecimation > 1)
2009  {
2010  UASSERT(textureSize % blendingDecimation == 0);
2011  }
2012  decimation = blendingDecimation;
2013  UDEBUG("decimation=%d", decimation);
2014  }
2015 
2016  std::vector<cv::Mat> blendGains(materials);
2017  for(int i=0; i<materials;++i)
2018  {
2019  blendGains[i] = cv::Mat(globalTextures.rows/decimation, globalTextures.rows/decimation, CV_32FC3, cv::Scalar::all(1.0f));
2020  }
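 // blendGains: one low-resolution (decimated) gain map per material, initialized to 1.0 (no change).
 // For vertices seen by several cameras, a per-texel correction toward the weighted average color is
 // stored here, then blurred, upsampled and multiplied into the atlas further below.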
2021 
2022  for(unsigned int p=0; p<vertexToPixels.size(); ++p)
2023  {
2024  if(vertexToPixels[p].size() > 1)
2025  {
2026  std::vector<float> gainsB(vertexToPixels[p].size());
2027  std::vector<float> gainsG(vertexToPixels[p].size());
2028  std::vector<float> gainsR(vertexToPixels[p].size());
2029  float sumWeight = 0.0f;
2030  int k=0;
2031  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[p].begin(); iter!=vertexToPixels[p].end(); ++iter)
2032  {
2033  if(materialsKept.at(iter->first))
2034  {
2035  int u = iter->second.x*emptyImage.cols + imageOrigin[iter->first].x;
2036  int v = (1.0-iter->second.y)*emptyImage.rows + imageOrigin[iter->first].y;
2037  float x = iter->second.x - 0.5f;
2038  float y = iter->second.y - 0.5f;
2039  float weight = 0.7f - sqrt(x*x+y*y);
2040  if(weight<0.0f)
2041  {
2042  weight = 0.0f;
2043  }
2044  int indexMaterial = newCamIndex[iter->first] / (cols*rows);
2045  cv::Vec3b * pt = globalTextures.ptr<cv::Vec3b>(v,u+indexMaterial*globalTextures.rows);
2046  gainsB[k] = static_cast<double>(pt->val[0]) * weight;
2047  gainsG[k] = static_cast<double>(pt->val[1]) * weight;
2048  gainsR[k] = static_cast<double>(pt->val[2]) * weight;
2049  sumWeight += weight;
2050  ++k;
2051  }
2052  }
2053  gainsB.resize(k);
2054  gainsG.resize(k);
2055  gainsR.resize(k);
2056 
2057  if(sumWeight > 0)
2058  {
2059  float targetColor[3];
2060  targetColor[0] = uSum(gainsB.data(), gainsB.size()) / sumWeight;
2061  targetColor[1] = uSum(gainsG.data(), gainsG.size()) / sumWeight;
2062  targetColor[2] = uSum(gainsR.data(), gainsR.size()) / sumWeight;
2063  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[p].begin(); iter!=vertexToPixels[p].end(); ++iter)
2064  {
2065  if(materialsKept.at(iter->first))
2066  {
2067  int u = iter->second.x*emptyImage.cols + imageOrigin[iter->first].x;
2068  int v = (1.0-iter->second.y)*emptyImage.rows + imageOrigin[iter->first].y;
2069  int indexMaterial = newCamIndex[iter->first] / (cols*rows);
2070  cv::Vec3b * pt = globalTextures.ptr<cv::Vec3b>(v,u+indexMaterial*globalTextures.rows);
2071  float gB = targetColor[0]/(pt->val[0]==0?1.0f:pt->val[0]);
2072  float gG = targetColor[1]/(pt->val[1]==0?1.0f:pt->val[1]);
2073  float gR = targetColor[2]/(pt->val[2]==0?1.0f:pt->val[2]);
2074  cv::Vec3f * ptr = blendGains[indexMaterial].ptr<cv::Vec3f>(v/decimation, u/decimation);
2075  ptr->val[0] = (gB>1.3f)?1.3f:(gB<0.7f)?0.7f:gB;
2076  ptr->val[1] = (gG>1.3f)?1.3f:(gG<0.7f)?0.7f:gG;
2077  ptr->val[2] = (gR>1.3f)?1.3f:(gR<0.7f)?0.7f:gR;
2078  }
2079  }
2080  }
2081  }
2082  }
2083 
2084  if(blendingGainsOut)
2085  {
2086  for(int t=0; t<(int)textures.size(); ++t)
2087  {
2088  //break;
2089  if(materialsKept.at(t))
2090  {
2091  int u = imageOrigin[t].x/decimation;
2092  int v = imageOrigin[t].y/decimation;
2093 
2094  int indexMaterial = newCamIndex[t] / (cols*rows);
2095  cv::Mat roi = blendGains[indexMaterial](cv::Rect(u, v, emptyImage.cols/decimation, emptyImage.rows/decimation));
2096  if(blendingGainsOut->find(textures[t].first) == blendingGainsOut->end())
2097  {
2098  std::map<int,cv::Mat> value;
2099  value.insert(std::make_pair(textures[t].second, roi.clone()));
2100  blendingGainsOut->insert(std::make_pair(textures[t].first, value));
2101  }
2102  else
2103  {
2104  blendingGainsOut->at(textures[t].first).insert(std::make_pair(textures[t].second, roi.clone()));
2105  }
2106  }
2107  }
2108  }
2109 
2110  for(int i=0; i<materials; ++i)
2111  {
2112  /*std::vector<cv::Mat> channels;
2113  cv::split(blendGains, channels);
2114  cv::Mat img;
2115  channels[0].convertTo(img,CV_8U,128.0,0);
2116  cv::imwrite("blendSmallB.png", img);
2117  channels[1].convertTo(img,CV_8U,128.0,0);
2118  cv::imwrite("blendSmallG.png", img);
2119  channels[2].convertTo(img,CV_8U,128.0,0);
2120  cv::imwrite("blendSmallR.png", img);*/
2121 
2122  cv::Mat globalTexturesROI = globalTextures(cv::Range::all(), cv::Range(i*globalTextures.rows, (i+1)*globalTextures.rows));
2123  cv::Mat dst;
2124  cv::blur(blendGains[i], dst, cv::Size(3,3));
2125  cv::resize(dst, blendGains[i], globalTexturesROI.size(), 0, 0, cv::INTER_LINEAR);
2126 
2127  /*cv::split(blendGains, channels);
2128  channels[0].convertTo(img,CV_8U,128.0,0);
2129  cv::imwrite("blendFullB.png", img);
2130  channels[1].convertTo(img,CV_8U,128.0,0);
2131  cv::imwrite("blendFullG.png", img);
2132  channels[2].convertTo(img,CV_8U,128.0,0);
2133  cv::imwrite("blendFullR.png", img);*/
2134 
2135  cv::multiply(globalTexturesROI, blendGains[i], globalTexturesROI, 1.0, CV_8UC3);
2136 
2137  //UWARN("Saving blending.png", globalTexture);
2138  //cv::imwrite("blending.png", globalTexture);
2139  }
2140 
2141  if(state) state->callback(uFormat("Blending (decimation=%d) %fs", decimation, timer.ticks()));
2142  }
2143  }
2144 
2145  if(brightnessContrastRatioLow > 0 || brightnessContrastRatioHigh > 0)
2146  {
2147  if(exposureFusion)
2148  {
2149  std::vector<cv::Mat> images;
2150  images.push_back(globalTextures);
2151  if (brightnessContrastRatioLow > 0)
2152  {
2153  images.push_back(util2d::brightnessAndContrastAuto(
2154  globalTextures,
2155  globalTextureMasks,
2156  (float)brightnessContrastRatioLow,
2157  0.0f));
2158  }
2159  if (brightnessContrastRatioHigh > 0)
2160  {
2161  images.push_back(util2d::brightnessAndContrastAuto(
2162  globalTextures,
2163  globalTextureMasks,
2164  0.0f,
2165  (float)brightnessContrastRatioHigh));
2166  }
2167 
2168  globalTextures = util2d::exposureFusion(images);
2169  }
2170  else
2171  {
2172  float alpha, beta;
2173  globalTextures = util2d::brightnessAndContrastAuto(
2174  globalTextures,
2175  globalTextureMasks,
2176  (float)brightnessContrastRatioLow,
2177  (float)brightnessContrastRatioHigh,
2178  &alpha,
2179  &beta);
2180  if(contrastValuesOut)
2181  {
2182  contrastValuesOut->first = alpha;
2183  contrastValuesOut->second = beta;
2184  }
2185  }
2186  if(state) state->callback(uFormat("Brightness and contrast auto %fs", timer.ticks()));
2187  }
2188  }
2189  }
2190  }
2191  UDEBUG("globalTextures=%d", globalTextures.cols?globalTextures.cols / globalTextures.rows:0);
2192  return globalTextures;
2193 }
2194 
2195 void fixTextureMeshForVisualization(pcl::TextureMesh & textureMesh)
2196 {
2197  // VTK issue:
2198  // tex_coordinates should be linked to points, not
2199  // polygon vertices. Points linked to multiple different TCoords (different textures) should
2200  // be duplicated.
2201  for (unsigned int t = 0; t < textureMesh.tex_coordinates.size(); ++t)
2202  {
2203  if(textureMesh.tex_polygons[t].size())
2204  {
2205  pcl::PointCloud<pcl::PointXYZ>::Ptr originalCloud(new pcl::PointCloud<pcl::PointXYZ>);
2206  pcl::fromPCLPointCloud2(textureMesh.cloud, *originalCloud);
2207 
2208  // make a cloud with as many points as polygon vertices
2209  unsigned int nPoints = textureMesh.tex_coordinates[t].size();
2210  UASSERT(nPoints == textureMesh.tex_polygons[t].size()*textureMesh.tex_polygons[t][0].vertices.size()); // assuming polygon size is constant!
2211 
2212  pcl::PointCloud<pcl::PointXYZ>::Ptr newCloud(new pcl::PointCloud<pcl::PointXYZ>);
2213  newCloud->resize(nPoints);
2214 
2215  unsigned int oi = 0;
2216  for (unsigned int i = 0; i < textureMesh.tex_polygons[t].size(); ++i)
2217  {
2218  pcl::Vertices & vertices = textureMesh.tex_polygons[t][i];
2219 
2220  for(unsigned int j=0; j<vertices.vertices.size(); ++j)
2221  {
2222  UASSERT(oi < newCloud->size());
2223  UASSERT_MSG((size_t)vertices.vertices[j] < originalCloud->size(), uFormat("%d vs %d", vertices.vertices[j], (int)originalCloud->size()).c_str());
2224  newCloud->at(oi) = originalCloud->at(vertices.vertices[j]);
2225  vertices.vertices[j] = oi; // new vertex index
2226  ++oi;
2227  }
2228  }
2229  pcl::toPCLPointCloud2(*newCloud, textureMesh.cloud);
2230  }
2231  }
2232 }
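// Illustrative usage (a minimal sketch, not part of the original file; "textureMesh" is assumed to
// come from util3d::createTextureMesh()):
//   pcl::TextureMesh::Ptr textureMesh = ...;
//   util3d::fixTextureMeshForVisualization(*textureMesh); // duplicate shared vertices for per-vertex UVs
//   // the mesh can now be displayed with a VTK-based viewer (e.g. pcl::visualization::PCLVisualizer)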
2233 
2234 bool multiBandTexturing(
2235  const std::string & outputOBJPath,
2236  const pcl::PCLPointCloud2 & cloud,
2237  const std::vector<pcl::Vertices> & polygons,
2238  const std::map<int, Transform> & cameraPoses,
2239  const std::vector<std::map<int, pcl::PointXY> > & vertexToPixels, // required output of util3d::createTextureMesh()
2240  const std::map<int, cv::Mat> & images, // raw or compressed, can be empty if memory or dbDriver should be used
2241  const std::map<int, std::vector<CameraModel> > & cameraModels, // Should match images
2242  const Memory * memory, // Should be set if images are not set
2243  const DBDriver * dbDriver, // Should be set if images and memory are not set
2244  int textureSize,
2245  const std::string & textureFormat,
2246  const std::map<int, std::map<int, cv::Vec4d> > & gains, // optional output of util3d::mergeTextures()
2247  const std::map<int, std::map<int, cv::Mat> > & blendingGains, // optional output of util3d::mergeTextures()
2248  const std::pair<float, float> & contrastValues, // optional output of util3d::mergeTextures()
2249  bool gainRGB)
2250 {
2251  return multiBandTexturing(
2252  outputOBJPath,
2253  cloud,
2254  polygons,
2255  cameraPoses,
2256  vertexToPixels,
2257  images,
2258  cameraModels,
2259  memory,
2260  dbDriver,
2261  textureSize,
2262  2,
2263  "1 5 10 0",
2264  textureFormat,
2265  gains,
2266  blendingGains,
2267  contrastValues,
2268  gainRGB);
2269 }
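// Convenience overload: delegates to the full version with textureDownScale=2 and nbContrib="1 5 10 0"
// (assumed to be the number of contributing cameras for each of the 4 frequency bands of the
// multi-band blending).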
2270 
2271 bool multiBandTexturing(
2272  const std::string & outputOBJPath,
2273  const pcl::PCLPointCloud2 & cloud,
2274  const std::vector<pcl::Vertices> & polygons,
2275  const std::map<int, Transform> & cameraPoses,
2276  const std::vector<std::map<int, pcl::PointXY> > & vertexToPixels,
2277  const std::map<int, cv::Mat> & images,
2278  const std::map<int, std::vector<CameraModel> > & cameraModels,
2279  const Memory * memory,
2280  const DBDriver * dbDriver,
2281  unsigned int textureSize,
2282  unsigned int textureDownScale,
2283  const std::string & nbContrib,
2284  const std::string & textureFormat,
2285  const std::map<int, std::map<int, cv::Vec4d> > & gains,
2286  const std::map<int, std::map<int, cv::Mat> > & blendingGains,
2287  const std::pair<float, float> & contrastValues,
2288  bool gainRGB,
2289  unsigned int unwrapMethod,
2290  bool fillHoles,
2291  unsigned int padding,
2292  double bestScoreThreshold,
2293  double angleHardThreshold,
2294  bool forceVisibleByAllVertices)
2295 {
2296 
2297 #ifdef RTABMAP_ALICE_VISION
2298  if(ULogger::level() == ULogger::kDebug)
2299  {
2300  system::Logger::get()->setLogLevel(system::EVerboseLevel::Trace);
2301  }
2302  else if(ULogger::level() == ULogger::kInfo)
2303  {
2304  system::Logger::get()->setLogLevel(system::EVerboseLevel::Info);
2305  }
2306  else if(ULogger::level() == ULogger::kWarning)
2307  {
2308  system::Logger::get()->setLogLevel(system::EVerboseLevel::Warning);
2309  }
2310  else
2311  {
2312  system::Logger::get()->setLogLevel(system::EVerboseLevel::Error);
2313  }
2314 
2315  sfmData::SfMData sfmData;
2316  pcl::PointCloud<pcl::PointXYZRGB> cloud2;
2317  pcl::fromPCLPointCloud2(cloud, cloud2);
2318  UASSERT(vertexToPixels.size() == cloud2.size());
2319  UINFO("Input mesh: %d points %d polygons", (int)cloud2.size(), (int)polygons.size());
2320  mesh::Texturing texturing;
2321 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2322  texturing.mesh = new mesh::Mesh();
2323  texturing.mesh->pts.resize(cloud2.size());
2324  texturing.mesh->pointsVisibilities.resize(cloud2.size());
2325 #else
2326  texturing.me = new mesh::Mesh();
2327  texturing.me->pts = new StaticVector<Point3d>(cloud2.size());
2328  texturing.pointsVisibilities = new mesh::PointsVisibility();
2329  texturing.pointsVisibilities->reserve(cloud2.size());
2330 #endif
2331  texturing.texParams.textureSide = textureSize;
2332  texturing.texParams.downscale = textureDownScale;
2333  std::vector<int> multiBandNbContrib;
2334  std::list<std::string> values = uSplit(nbContrib, ' ');
2335  for(std::list<std::string>::iterator iter=values.begin(); iter!=values.end(); ++iter)
2336  {
2337  multiBandNbContrib.push_back(uStr2Int(*iter));
2338  }
2339  if(multiBandNbContrib.size() != 4)
2340  {
2341  UERROR("multiband: Wrong number of nb of contribution (vaue=\"%s\", should be 4), using default values instead.", nbContrib.c_str());
2342  }
2343  else
2344  {
2345  texturing.texParams.multiBandNbContrib = multiBandNbContrib;
2346  }
2347  texturing.texParams.padding = padding;
2348  texturing.texParams.fillHoles = fillHoles;
2349  texturing.texParams.bestScoreThreshold = bestScoreThreshold;
2350  texturing.texParams.angleHardThreshold = angleHardThreshold;
2351  texturing.texParams.forceVisibleByAllVertices = forceVisibleByAllVertices;
2352  texturing.texParams.visibilityRemappingMethod = mesh::EVisibilityRemappingMethod::Pull;
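 // These fields map to AliceVision's mesh::Texturing parameters: textureSide/downscale control the
 // output texture resolution, and multiBandNbContrib is assumed to set the number of best-scoring
 // cameras contributing to each of the 4 frequency bands of the multi-band blending.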
2353 
2354 
2355  for(size_t i=0;i<cloud2.size();++i)
2356  {
2357  pcl::PointXYZRGB pt = cloud2.at(i);
2358 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2359  texturing.mesh->pointsVisibilities[i].reserve(vertexToPixels[i].size());
2360  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[i].begin(); iter!=vertexToPixels[i].end();++iter)
2361  {
2362  texturing.mesh->pointsVisibilities[i].push_back(iter->first);
2363  }
2364  texturing.mesh->pts[i] = Point3d(pt.x, pt.y, pt.z);
2365 #else
2366  mesh::PointVisibility* pointVisibility = new mesh::PointVisibility();
2367  pointVisibility->reserve(vertexToPixels[i].size());
2368  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[i].begin(); iter!=vertexToPixels[i].end();++iter)
2369  {
2370  pointVisibility->push_back(iter->first);
2371  }
2372  texturing.pointsVisibilities->push_back(pointVisibility);
2373  (*texturing.me->pts)[i] = Point3d(pt.x, pt.y, pt.z);
2374 #endif
2375  }
2376 
2377 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2378  texturing.mesh->tris.resize(polygons.size());
2379  texturing.mesh->trisMtlIds().resize(polygons.size());
2380 #else
2381  texturing.me->tris = new StaticVector<mesh::Mesh::triangle>(polygons.size());
2382 #endif
2383  for(size_t i=0;i<polygons.size();++i)
2384  {
2385  UASSERT(polygons[i].vertices.size() == 3);
2386 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2387  texturing.mesh->trisMtlIds()[i] = -1;
2388  texturing.mesh->tris[i] = mesh::Mesh::triangle(
2389 #else
2390  (*texturing.me->tris)[i] = mesh::Mesh::triangle(
2391 #endif
2392  polygons[i].vertices[0],
2393  polygons[i].vertices[1],
2394  polygons[i].vertices[2]);
2395  }
2396  UTimer timer;
2397  std::string outputDirectory = UDirectory::getDir(outputOBJPath);
2398  std::string tmpImageDirectory = outputDirectory+"/rtabmap_tmp_textures";
2399  UDirectory::removeDir(tmpImageDirectory);
2400  UDirectory::makeDir(tmpImageDirectory);
2401  UINFO("Temporary saving images from %ld nodes in directory \"%s\"...", cameraPoses.size(), tmpImageDirectory.c_str());
2402  int viewId = 0;
2403  for(std::map<int, Transform>::const_iterator iter = cameraPoses.lower_bound(1); iter!=cameraPoses.end(); ++iter)
2404  {
2405  int camId = iter->first;
2406  cv::Mat image;
2407  std::vector<CameraModel> models;
2408 
2409  if( images.find(camId) != images.end() &&
2410  !images.find(camId)->second.empty() &&
2411  cameraModels.find(camId) != cameraModels.end())
2412  {
2413  image = images.find(camId)->second;
2414  models = cameraModels.find(camId)->second;
2415  }
2416  else if(memory)
2417  {
2418  SensorData data = memory->getNodeData(camId, true, false, false, false);
2419  models = data.cameraModels();
2420  if(models.empty() && data.stereoCameraModels().size())
2421  {
2422  for(size_t i=0; i<data.stereoCameraModels().size(); ++i)
2423  {
2424  models.push_back(data.stereoCameraModels()[i].left());
2425  }
2426  }
2427  if(data.imageRaw().empty())
2428  {
2429  image = data.imageCompressed();
2430  }
2431  else
2432  {
2433  image = data.imageRaw();
2434  }
2435 
2436  if(models.empty() || image.empty())
2437  {
2438  Transform odomPose;
2439  int mapId;
2440  int weight=0;
2441  std::string label;
2442  double stamp;
2443  Transform gt;
2444  std::vector<float> vel;
2445  GPS gps;
2446  EnvSensors envs;
2447  memory->getNodeInfo(camId, odomPose, mapId, weight, label, stamp, gt, vel, gps, envs, true);
2448  if(weight == -1) // just ignore intermediate nodes if their data is not set
2449  continue;
2450  }
2451  }
2452  else if(dbDriver)
2453  {
2454  std::vector<StereoCameraModel> stereoModels;
2455  dbDriver->getCalibration(camId, models, stereoModels);
2456  if(models.empty() && stereoModels.size())
2457  {
2458  for(size_t i=0; i<stereoModels.size(); ++i)
2459  {
2460  models.push_back(stereoModels[i].left());
2461  }
2462  }
2463 
2464  SensorData data;
2465  dbDriver->getNodeData(camId, data, true, false, false, false);
2466  if(data.imageRaw().empty())
2467  {
2468  image = data.imageCompressed();
2469  }
2470  else
2471  {
2472  image = data.imageRaw();
2473  }
2474 
2475  if(models.empty() || image.empty())
2476  {
2477  int weight=0;
2478  dbDriver->getWeight(camId, weight);
2479  if(weight == -1) // just ignore intermediate nodes if their data is not set
2480  continue;
2481  }
2482  }
2483  if(models.empty())
2484  {
2485  UERROR("No camera models found for camera %d. Aborting multiband texturing...", iter->first);
2486  return false;
2487  }
2488  if(image.empty())
2489  {
2490  UERROR("No image found for camera %d. Aborting multiband texturing...", iter->first);
2491  return false;
2492  }
2493 
2494  if(image.rows == 1 && image.type() == CV_8UC1)
2495  {
2496  image = uncompressImage(image);
2497  }
2498  else
2499  {
2500  image = image.clone();
2501  }
2502 
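 // Each sub-camera of a multi-camera rig becomes its own AliceVision view: the image strip is split
 // per camera model, the optional gains/blending gains are applied, and the resulting pose and pinhole
 // intrinsics are registered in the SfMData under a unique viewId.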
2503  for(size_t i=0; i<models.size(); ++i)
2504  {
2505  const CameraModel & model = models.at(i);
2506  cv::Size imageSize = model.imageSize();
2507  if(imageSize.height == 0)
2508  {
2509  // backward compatibility
2510  imageSize.height = image.rows;
2511  imageSize.width = image.cols;
2512  }
2513 
2514  UASSERT(image.cols % imageSize.width == 0);
2515  cv::Mat imageRoi = image.colRange(i*imageSize.width, (i+1)*imageSize.width);
2516 
2517  if(gains.find(camId) != gains.end() &&
2518  gains.at(camId).find(i) != gains.at(camId).end())
2519  {
2520  const cv::Vec4d & g = gains.at(camId).at(i);
2521  if(imageRoi.channels() == 1)
2522  {
2523  cv::multiply(imageRoi, g.val[0], imageRoi);
2524  }
2525  else
2526  {
2527  std::vector<cv::Mat> channels;
2528  cv::split(imageRoi, channels);
2529 
2530  // assuming BGR
2531  cv::multiply(channels[0], g.val[gainRGB?3:0], channels[0]);
2532  cv::multiply(channels[1], g.val[gainRGB?2:0], channels[1]);
2533  cv::multiply(channels[2], g.val[gainRGB?1:0], channels[2]);
2534 
2535  cv::Mat output;
2536  cv::merge(channels, output);
2537  imageRoi = output;
2538  }
2539  }
2540 
2541  if(blendingGains.find(camId) != blendingGains.end() &&
2542  blendingGains.at(camId).find(i) != blendingGains.at(camId).end())
2543  {
2544  // Should be color for blending options
2545  if(imageRoi.channels() == 1)
2546  {
2547  cv::Mat imageRoiColor;
2548  cv::cvtColor(imageRoi, imageRoiColor, CV_GRAY2BGR);
2549  imageRoi = imageRoiColor;
2550  }
2551 
2552  cv::Mat g = blendingGains.at(camId).at(i);
2553  cv::Mat dst;
2554  cv::blur(g, dst, cv::Size(3,3));
2555  cv::Mat gResized;
2556  cv::resize(dst, gResized, imageRoi.size(), 0, 0, cv::INTER_LINEAR);
2557  cv::Mat output;
2558  cv::multiply(imageRoi, gResized, output, 1.0, CV_8UC3);
2559  imageRoi = output;
2560  }
2561 
2562  Transform t = (iter->second * model.localTransform()).inverse();
2563  Eigen::Matrix<double, 3, 4> m = t.toEigen3d().matrix().block<3,4>(0, 0);
2564  sfmData::CameraPose pose(geometry::Pose3(m), true);
2565  sfmData.setAbsolutePose((IndexT)viewId, pose);
2566 
2567  UDEBUG("%d %d %f %f %f %f", imageSize.width, imageSize.height, model.fx(), model.fy(), model.cx(), model.cy());
2568  std::shared_ptr<camera::IntrinsicBase> camPtr = std::make_shared<camera::Pinhole>(
2569 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=4)
2570  //https://github.com/alicevision/AliceVision/commit/9fab5c79a1c65595fe5c5001267e1c5212bc93f0#diff-b0c0a3c30de50be8e4ed283dfe4c8ae4a9bc861aa9a83bd8bfda8182e9d67c08
2571  // [all] the camera principal point is now defined as an offset relative to the image center
2572  imageSize.width, imageSize.height, model.fx(), model.fy(), model.cx() - double(imageSize.width) * 0.5, model.cy() - double(imageSize.height) * 0.5);
2573 #else
2574  imageSize.width, imageSize.height, model.fx(), model.cx(), model.cy());
2575 #endif
2576  sfmData.intrinsics.insert(std::make_pair((IndexT)viewId, camPtr));
2577 
2578  std::string imagePath = tmpImageDirectory+uFormat("/%d.jpg", viewId);
2579  cv::imwrite(imagePath, imageRoi);
2580  std::shared_ptr<sfmData::View> viewPtr = std::make_shared<sfmData::View>(
2581  imagePath,
2582  (IndexT)viewId,
2583  (IndexT)viewId,
2584  (IndexT)viewId,
2585  imageSize.width,
2586  imageSize.height);
2587  sfmData.views.insert(std::make_pair((IndexT)viewId, viewPtr));
2588  ++viewId;
2589  }
2590  UDEBUG("camId=%d", camId);
2591  }
2592  UINFO("Temporary saving images in directory \"%s\"... done (%d images of %d nodes). %fs", tmpImageDirectory.c_str(), viewId, (int)cameraPoses.size(), timer.ticks());
2593 
2594  mvsUtils::MultiViewParams mp(sfmData);
2595 
2596  UINFO("Unwrapping (method=%d=%s)...", unwrapMethod, mesh::EUnwrapMethod_enumToString((mesh::EUnwrapMethod)unwrapMethod).c_str());
2597  texturing.unwrap(mp, (mesh::EUnwrapMethod)unwrapMethod);
2598  UINFO("Unwrapping done. %fs", timer.ticks());
2599 
2600  // save final obj file
2601  std::string baseName = uSplit(UFile::getName(outputOBJPath), '.').front();
2602 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=4)
2603  texturing.saveAs(outputDirectory, baseName, aliceVision::mesh::EFileType::OBJ, imageIO::EImageFileType::PNG);
2604 #else
2605  texturing.saveAsOBJ(outputDirectory, baseName);
2606 #endif
2607  UINFO("Saved %s. %fs", outputOBJPath.c_str(), timer.ticks());
2608 
2609  // generate textures
2610  UINFO("Generating textures...");
2611  texturing.generateTextures(mp, outputDirectory);
2612  UINFO("Generating textures done. %fs", timer.ticks());
2613 
2614  UINFO("Cleanup temporary directory \"%s\"...", tmpImageDirectory.c_str());
2615  UDirectory dir(tmpImageDirectory);
2616  std::string fp = dir.getNextFilePath();
2617  while(!fp.empty())
2618  {
2619  UFile::erase(fp);
2620  fp = dir.getNextFilePath();
2621  }
2622  UDirectory::removeDir(tmpImageDirectory);
2623  UINFO("Cleanup temporary directory \"%s\"... done.", tmpImageDirectory.c_str());
2624 
2625  UINFO("Rename/convert textures...");
2626  dir.setPath(outputDirectory, "png");
2627  std::map<std::string, std::string> texNames; // <old, new>
2628  std::string outputFormat = textureFormat;
2629  if(outputFormat.front() == '.')
2630  {
2631  outputFormat = outputFormat.substr(1, std::string::npos);
2632  }
2633  for(std::list<std::string>::const_iterator iter=dir.getFileNames().begin(); iter!=dir.getFileNames().end(); ++iter)
2634  {
2635  // Textures are called "texture_1001.png", "texture_1002.png", ...
2636  if(uStrContains(*iter, "texture_10"))
2637  {
2638  cv::Mat img = cv::imread(outputDirectory+"/"+*iter);
2639  if(contrastValues.first != 0.0f || contrastValues.second != 0.0f)
2640  {
2641  UASSERT(img.channels() == 3);
2642  // Re-use the same contrast values for all images
2643  UINFO("Apply contrast values %f %f", contrastValues.first, contrastValues.second);
2644  img.convertTo(img, -1, contrastValues.first, contrastValues.second);
2645  }
2646  std::string newName = *iter;
2647  boost::replace_all(newName, "png", outputFormat);
2648  boost::replace_all(newName, "texture", baseName);
2649  texNames.insert(std::make_pair(*iter, newName));
2650  cv::imwrite(outputDirectory+"/"+newName, img);
2651  UFile::erase(outputDirectory+"/"+*iter);
2652  }
2653  }
2654  std::ifstream fi(outputDirectory+"/"+baseName+".mtl");
2655  std::string mtlStr((std::istreambuf_iterator<char>(fi)),
2656  std::istreambuf_iterator<char>());
2657  fi.close();
2658  UFile::erase(outputDirectory+"/"+baseName);
2659  for(std::map<std::string, std::string>::iterator iter=texNames.begin(); iter!=texNames.end(); ++iter)
2660  {
2661  boost::replace_all(mtlStr, iter->first, iter->second);
2662  }
2663  std::ofstream fo(outputDirectory+"/"+baseName+".mtl");
2664  fo.write(mtlStr.c_str(), mtlStr.size());
2665  fo.close();
2666  UINFO("Rename/convert textures... done. %fs", timer.ticks());
2667 
2668 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2669  UINFO("Cleanup sfmdata...");
2670  sfmData.clear();
2671  UINFO("Cleanup sfmdata... done. %fs", timer.ticks());
2672 #endif
2673 
2674  return true;
2675 #else
2676  UERROR("Cannot unwrap texture mesh. RTAB-Map is not built with Alice Vision support! Returning false.");
2677  return false;
2678 #endif
2679 }
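// Illustrative call (a minimal sketch, not part of the original file; "mesh", "poses", "vertexToPixels"
// and "memory" are assumed to exist, with vertexToPixels coming from util3d::createTextureMesh()):
//   util3d::multiBandTexturing(
//       "/tmp/mesh.obj", mesh.cloud, mesh.polygons, poses, vertexToPixels,
//       std::map<int, cv::Mat>(),                      // images: fetched from memory instead
//       std::map<int, std::vector<CameraModel> >(),    // cameraModels: fetched from memory instead
//       &memory, 0,                                    // memory, dbDriver
//       8192, ".jpg",
//       std::map<int, std::map<int, cv::Vec4d> >(),    // gains (optional)
//       std::map<int, std::map<int, cv::Mat> >(),      // blendingGains (optional)
//       std::pair<float, float>(0.0f, 0.0f),           // contrastValues (optional)
//       true);                                         // gainRGB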
2680 
2681 LaserScan computeNormals(
2682  const LaserScan & laserScan,
2683  int searchK,
2684  float searchRadius)
2685 {
2686  if(laserScan.isEmpty())
2687  {
2688  return laserScan;
2689  }
2690 
2691  pcl::PointCloud<pcl::Normal>::Ptr normals;
2692  // convert to compatible PCL format and filter it
2693  if(laserScan.hasRGB())
2694  {
2695  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = laserScanToPointCloudRGB(laserScan);
2696  if(cloud->size())
2697  {
2698  UASSERT(!laserScan.is2d());
2699  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals(cloud, searchK, searchRadius);
2700  return LaserScan(laserScanFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), laserScan.localTransform());
2701  }
2702  }
2703  else if(laserScan.hasIntensity())
2704  {
2705  pcl::PointCloud<pcl::PointXYZI>::Ptr cloud = laserScanToPointCloudI(laserScan);
2706  if(cloud->size())
2707  {
2708  if(laserScan.is2d())
2709  {
2710  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals2D(cloud, searchK, searchRadius);
2711  if(laserScan.angleIncrement() > 0.0f)
2712  {
2713  return LaserScan(laserScan2dFromPointCloud(*cloud, *normals), laserScan.rangeMin(), laserScan.rangeMax(), laserScan.angleMin(), laserScan.angleMax(), laserScan.angleIncrement(), laserScan.localTransform());
2714  }
2715  else
2716  {
2717  return LaserScan(laserScan2dFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), laserScan.localTransform());
2718  }
2719 
2720  }
2721  else
2722  {
2723  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals(cloud, searchK, searchRadius);
2724  return LaserScan(laserScanFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), laserScan.localTransform());
2725  }
2726  }
2727  }
2728  else
2729  {
2730  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = laserScanToPointCloud(laserScan);
2731  if(cloud->size())
2732  {
2733  if(laserScan.is2d())
2734  {
2735  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals2D(cloud, searchK, searchRadius);
2736  if(laserScan.angleIncrement() > 0.0f)
2737  {
2738  return LaserScan(laserScan2dFromPointCloud(*cloud, *normals), laserScan.rangeMin(), laserScan.rangeMax(), laserScan.angleMin(), laserScan.angleMax(), laserScan.angleIncrement(), laserScan.localTransform());
2739  }
2740  else
2741  {
2742  return LaserScan(laserScan2dFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), laserScan.localTransform());
2743  }
2744  }
2745  else
2746  {
2747  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals(cloud, searchK, searchRadius);
2748  return LaserScan(laserScanFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), laserScan.localTransform());
2749  }
2750  }
2751  }
2752  return LaserScan();
2753 }
2754 
2755 template<typename PointT>
2756 pcl::PointCloud<pcl::Normal>::Ptr computeNormalsImpl(
2757  const typename pcl::PointCloud<PointT>::Ptr & cloud,
2758  const pcl::IndicesPtr & indices,
2759  int searchK,
2760  float searchRadius,
2761  const Eigen::Vector3f & viewPoint)
2762 {
2763  typename pcl::search::KdTree<PointT>::Ptr tree (new pcl::search::KdTree<PointT>);
2764  if(indices->size())
2765  {
2766  tree->setInputCloud(cloud, indices);
2767  }
2768  else
2769  {
2770  tree->setInputCloud (cloud);
2771  }
2772 
2773  // Normal estimation*
2774 #ifdef PCL_OMP
2775  pcl::NormalEstimationOMP<PointT, pcl::Normal> n;
2776 #else
2777  pcl::NormalEstimation<PointT, pcl::Normal> n;
2778 #endif
2779  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
2780  n.setInputCloud (cloud);
2781  // Commented: Keep the output normals size the same as the input cloud
2782  //if(indices->size())
2783  //{
2784  // n.setIndices(indices);
2785  //}
2786  n.setSearchMethod (tree);
2787  n.setKSearch (searchK);
2788  n.setRadiusSearch (searchRadius);
2789  n.setViewPoint(viewPoint[0], viewPoint[1], viewPoint[2]);
2790  n.compute (*normals);
2791 
2792  return normals;
2793 }
2794 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2795  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
2796  int searchK,
2797  float searchRadius,
2798  const Eigen::Vector3f & viewPoint)
2799 {
2800  pcl::IndicesPtr indices(new std::vector<int>);
2801  return computeNormals(cloud, indices, searchK, searchRadius, viewPoint);
2802 }
2803 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2804  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
2805  int searchK,
2806  float searchRadius,
2807  const Eigen::Vector3f & viewPoint)
2808 {
2809  pcl::IndicesPtr indices(new std::vector<int>);
2810  return computeNormals(cloud, indices, searchK, searchRadius, viewPoint);
2811 }
2812 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2813  const pcl::PointCloud<pcl::PointXYZI>::Ptr & cloud,
2814  int searchK,
2815  float searchRadius,
2816  const Eigen::Vector3f & viewPoint)
2817 {
2818  pcl::IndicesPtr indices(new std::vector<int>);
2819  return computeNormals(cloud, indices, searchK, searchRadius, viewPoint);
2820 }
2821 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2822  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
2823  const pcl::IndicesPtr & indices,
2824  int searchK,
2825  float searchRadius,
2826  const Eigen::Vector3f & viewPoint)
2827 {
2828  return computeNormalsImpl<pcl::PointXYZ>(cloud, indices, searchK, searchRadius, viewPoint);
2829 }
2830 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2831  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
2832  const pcl::IndicesPtr & indices,
2833  int searchK,
2834  float searchRadius,
2835  const Eigen::Vector3f & viewPoint)
2836 {
2837  return computeNormalsImpl<pcl::PointXYZRGB>(cloud, indices, searchK, searchRadius, viewPoint);
2838 }
2839 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2840  const pcl::PointCloud<pcl::PointXYZI>::Ptr & cloud,
2841  const pcl::IndicesPtr & indices,
2842  int searchK,
2843  float searchRadius,
2844  const Eigen::Vector3f & viewPoint)
2845 {
2846  return computeNormalsImpl<pcl::PointXYZI>(cloud, indices, searchK, searchRadius, viewPoint);
2847 }
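// Illustrative usage (a minimal sketch, not part of the original file):
//   pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
//   // ... fill cloud ...
//   // 20 nearest neighbors, no radius limit, viewpoint at the sensor origin
//   pcl::PointCloud<pcl::Normal>::Ptr normals =
//       util3d::computeNormals(cloud, 20, 0.0f, Eigen::Vector3f(0, 0, 0));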
2848 
2849 template<typename PointT>
2850 pcl::PointCloud<pcl::Normal>::Ptr computeNormals2DImpl(
2851  const typename pcl::PointCloud<PointT>::Ptr & cloud,
2852  int searchK,
2853  float searchRadius,
2854  const Eigen::Vector3f & viewPoint)
2855 {
2856  UASSERT(searchK>0 || searchRadius>0.0f);
2857  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
2858 
2859  typename pcl::search::KdTree<PointT>::Ptr tree (new pcl::search::KdTree<PointT>);
2860  tree->setInputCloud (cloud);
2861 
2862  normals->resize(cloud->size());
2863 
2864  float bad_point = std::numeric_limits<float>::quiet_NaN ();
2865 
2866  // estimate a normal at each point from its KdTree neighbors
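 // For each neighbor, a candidate normal n = (v x d) x v is built, where v is the vector to the
 // neighbor and d the direction to the viewpoint: n is perpendicular to v, lies in the plane spanned
 // by v and d, and faces the viewpoint. The point's normal is the normalized mean of the candidates.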
2867  for(unsigned int i=0; i<cloud->size(); ++i)
2868  {
2869  const PointT & pt = cloud->at(i);
2870  std::vector<Eigen::Vector3f> neighborNormals;
2871  Eigen::Vector3f direction;
2872  direction[0] = viewPoint[0] - pt.x;
2873  direction[1] = viewPoint[1] - pt.y;
2874  direction[2] = viewPoint[2] - pt.z;
2875 
2876  std::vector<int> k_indices;
2877  std::vector<float> k_sqr_distances;
2878  if(searchRadius>0.0f)
2879  {
2880  tree->radiusSearch(cloud->at(i), searchRadius, k_indices, k_sqr_distances, searchK);
2881  }
2882  else
2883  {
2884  tree->nearestKSearch(cloud->at(i), searchK, k_indices, k_sqr_distances);
2885  }
2886 
2887  for(unsigned int j=0; j<k_indices.size(); ++j)
2888  {
2889  if(k_indices.at(j) != (int)i)
2890  {
2891  const PointT & pt2 = cloud->at(k_indices.at(j));
2892  Eigen::Vector3f v(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
2893  Eigen::Vector3f up = v.cross(direction);
2894  Eigen::Vector3f n = up.cross(v);
2895  n.normalize();
2896  neighborNormals.push_back(n);
2897  }
2898  }
2899 
2900  if(neighborNormals.empty())
2901  {
2902  normals->at(i).normal_x = bad_point;
2903  normals->at(i).normal_y = bad_point;
2904  normals->at(i).normal_z = bad_point;
2905  }
2906  else
2907  {
2908  Eigen::Vector3f meanNormal(0,0,0);
2909  for(unsigned int j=0; j<neighborNormals.size(); ++j)
2910  {
2911  meanNormal+=neighborNormals[j];
2912  }
2913  meanNormal /= (float)neighborNormals.size();
2914  meanNormal.normalize();
2915  normals->at(i).normal_x = meanNormal[0];
2916  normals->at(i).normal_y = meanNormal[1];
2917  normals->at(i).normal_z = meanNormal[2];
2918  }
2919  }
2920 
2921  return normals;
2922 }
2923 pcl::PointCloud<pcl::Normal>::Ptr computeNormals2D(
2924  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
2925  int searchK,
2926  float searchRadius,
2927  const Eigen::Vector3f & viewPoint)
2928 {
2929  return computeNormals2DImpl<pcl::PointXYZ>(cloud, searchK, searchRadius, viewPoint);
2930 }
2931 pcl::PointCloud<pcl::Normal>::Ptr computeNormals2D(
2932  const pcl::PointCloud<pcl::PointXYZI>::Ptr & cloud,
2933  int searchK,
2934  float searchRadius,
2935  const Eigen::Vector3f & viewPoint)
2936 {
2937  return computeNormals2DImpl<pcl::PointXYZI>(cloud, searchK, searchRadius, viewPoint);
2938 }
2939 
2940 template<typename PointT>
2941 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals2DImpl(
2942  const typename pcl::PointCloud<PointT>::Ptr & cloud,
2943  int searchK,
2944  float searchRadius,
2945  const Eigen::Vector3f & viewPoint)
2946 {
2947  UASSERT(searchK>0);
2948  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
2949 
2950  normals->resize(cloud->size());
2951  searchRadius *= searchRadius; // squared distance
2952 
2953  float bad_point = std::numeric_limits<float>::quiet_NaN ();
2954 
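 // "Fast organized" variant: instead of a KdTree search, walk up to searchK neighbors before and after
 // the current index in scan order, stopping at the first neighbor farther than searchRadius, and
 // average the same (v x d) x v candidate normals as in computeNormals2DImpl().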
2955  // assuming that points are ordered
2956  for(int i=0; i<(int)cloud->size(); ++i)
2957  {
2958  int li = i-searchK;
2959  if(li<0)
2960  {
2961  li=0;
2962  }
2963  int hi = i+searchK;
2964  if(hi>=(int)cloud->size())
2965  {
2966  hi=(int)cloud->size()-1;
2967  }
2968 
2969  // get points before this one that are not too far
2970  const PointT & pt = cloud->at(i);
2971  std::vector<Eigen::Vector3f> neighborNormals;
2972  Eigen::Vector3f direction;
2973  direction[0] = viewPoint[0] - cloud->at(i).x;
2974  direction[1] = viewPoint[1] - cloud->at(i).y;
2975  direction[2] = viewPoint[2] - cloud->at(i).z;
2976  for(int j=i-1; j>=li; --j)
2977  {
2978  const PointT & pt2 = cloud->at(j);
2979  Eigen::Vector3f vd(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
2980  if(searchRadius<=0.0f || (vd[0]*vd[0] + vd[1]*vd[1] + vd[2]*vd[2]) < searchRadius)
2981  {
2982  Eigen::Vector3f v(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
2983  Eigen::Vector3f up = v.cross(direction);
2984  Eigen::Vector3f n = up.cross(v);
2985  n.normalize();
2986  neighborNormals.push_back(n);
2987  }
2988  else
2989  {
2990  break;
2991  }
2992  }
2993  for(int j=i+1; j<=hi; ++j)
2994  {
2995  const PointT & pt2 = cloud->at(j);
2996  Eigen::Vector3f vd(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
2997  if(searchRadius<=0.0f || (vd[0]*vd[0] + vd[1]*vd[1] + vd[2]*vd[2]) < searchRadius)
2998  {
2999  Eigen::Vector3f v(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
3000  Eigen::Vector3f up = v[2]==0.0f?Eigen::Vector3f(0,0,1):v.cross(direction);
3001  Eigen::Vector3f n = up.cross(v);
3002  n.normalize();
3003  neighborNormals.push_back(n);
3004  }
3005  else
3006  {
3007  break;
3008  }
3009  }
3010 
3011  if(neighborNormals.empty())
3012  {
3013  normals->at(i).normal_x = bad_point;
3014  normals->at(i).normal_y = bad_point;
3015  normals->at(i).normal_z = bad_point;
3016  }
3017  else
3018  {
3019  Eigen::Vector3f meanNormal(0,0,0);
3020  for(unsigned int j=0; j<neighborNormals.size(); ++j)
3021  {
3022  meanNormal+=neighborNormals[j];
3023  }
3024  meanNormal /= (float)neighborNormals.size();
3025  meanNormal.normalize();
3026  normals->at(i).normal_x = meanNormal[0];
3027  normals->at(i).normal_y = meanNormal[1];
3028  normals->at(i).normal_z = meanNormal[2];
3029  }
3030  }
3031 
3032  return normals;
3033 }
3034 
3035 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals2D(
3036  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
3037  int searchK,
3038  float searchRadius,
3039  const Eigen::Vector3f & viewPoint)
3040 {
3041  return computeFastOrganizedNormals2DImpl<pcl::PointXYZ>(cloud, searchK, searchRadius, viewPoint);
3042 }
3043 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals2D(
3044  const pcl::PointCloud<pcl::PointXYZI>::Ptr & cloud,
3045  int searchK,
3046  float searchRadius,
3047  const Eigen::Vector3f & viewPoint)
3048 {
3049  return computeFastOrganizedNormals2DImpl<pcl::PointXYZI>(cloud, searchK, searchRadius, viewPoint);
3050 }
3051 
3052 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals(
3053  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
3054  float maxDepthChangeFactor,
3055  float normalSmoothingSize,
3056  const Eigen::Vector3f & viewPoint)
3057 {
3058  pcl::IndicesPtr indices(new std::vector<int>);
3059  return computeFastOrganizedNormals(cloud, indices, maxDepthChangeFactor, normalSmoothingSize, viewPoint);
3060 }
3061 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals(
3062  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
3063  const pcl::IndicesPtr & indices,
3064  float maxDepthChangeFactor,
3065  float normalSmoothingSize,
3066  const Eigen::Vector3f & viewPoint)
3067 {
3068  UASSERT(cloud->isOrganized());
3069 
3070  pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB>);
3071  if(indices->size())
3072  {
3073  tree->setInputCloud(cloud, indices);
3074  }
3075  else
3076  {
3077  tree->setInputCloud (cloud);
3078  }
3079 
3080  // Normal estimation
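 // (integral-image based: requires an organized cloud and is typically much faster than KdTree-based
 // estimation; AVERAGE_3D_GRADIENT crosses horizontal and vertical 3D gradients computed with
 // integral images)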
3081  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
3082  pcl::IntegralImageNormalEstimation<pcl::PointXYZRGB, pcl::Normal> ne;
3083  ne.setNormalEstimationMethod (ne.AVERAGE_3D_GRADIENT);
3084  ne.setMaxDepthChangeFactor(maxDepthChangeFactor);
3085  ne.setNormalSmoothingSize(normalSmoothingSize);
3086  ne.setBorderPolicy(ne.BORDER_POLICY_MIRROR);
3087  ne.setInputCloud(cloud);
3088  // Commented: Keep the output normals size the same as the input cloud
3089  //if(indices->size())
3090  //{
3091  // ne.setIndices(indices);
3092  //}
3093  ne.setSearchMethod(tree);
3094  ne.setViewPoint(viewPoint[0], viewPoint[1], viewPoint[2]);
3095  ne.compute(*normals);
3096 
3097  return normals;
3098 }
3099 
3100 float computeNormalsComplexity(
3101  const LaserScan & scan,
3102  const Transform & t,
3103  cv::Mat * pcaEigenVectors,
3104  cv::Mat * pcaEigenValues)
3105 {
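 // Complexity metric: run a PCA on the (rotated) scan normals and return the smallest eigenvalue,
 // scaled by the dimension so the result is roughly in [0, 1]: ~0 when the normals are concentrated
 // along few directions (e.g. a single plane or a corridor), larger when they are well spread.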
3106  if(!scan.isEmpty() && (scan.hasNormals()))
3107  {
3108  //Construct a buffer used by the pca analysis
3109  int sz = static_cast<int>(scan.size()*2);
3110  bool is2d = scan.is2d();
3111  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
3112  int oi = 0;
3113  int nOffset = scan.getNormalsOffset();
3114  bool doTransform = false;
3115  Transform tn;
3116  if(!t.isIdentity() || !scan.localTransform().isIdentity())
3117  {
3118  tn = (t*scan.localTransform()).rotation();
3119  doTransform = true;
3120  }
3121  for (int i = 0; i < scan.size(); ++i)
3122  {
3123  const float * ptrScan = scan.data().ptr<float>(0, i);
3124 
3125  if(is2d)
3126  {
3127  if(uIsFinite(ptrScan[nOffset]) && uIsFinite(ptrScan[nOffset+1]))
3128  {
3129  cv::Point3f n(ptrScan[nOffset], ptrScan[nOffset+1], 0);
3130  if(doTransform)
3131  {
3132  n = util3d::transformPoint(n, tn);
3133  }
3134  float * ptr = data_normals.ptr<float>(oi++, 0);
3135  ptr[0] = n.x;
3136  ptr[1] = n.y;
3137  }
3138  }
3139  else
3140  {
3141  if(uIsFinite(ptrScan[nOffset]) && uIsFinite(ptrScan[nOffset+1]) && uIsFinite(ptrScan[nOffset+2]))
3142  {
3143  cv::Point3f n(ptrScan[nOffset], ptrScan[nOffset+1], ptrScan[nOffset+2]);
3144  if(doTransform)
3145  {
3146  n = util3d::transformPoint(n, tn);
3147  }
3148  float * ptr = data_normals.ptr<float>(oi++, 0);
3149  ptr[0] = n.x;
3150  ptr[1] = n.y;
3151  ptr[2] = n.z;
3152  }
3153  }
3154  }
3155  if(oi>1)
3156  {
3157  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3158 
3159  if(pcaEigenVectors)
3160  {
3161  *pcaEigenVectors = pca_analysis.eigenvectors;
3162  }
3163  if(pcaEigenValues)
3164  {
3165  *pcaEigenValues = pca_analysis.eigenvalues;
3166  }
3167  UASSERT((is2d && pca_analysis.eigenvalues.total()>=2) || (!is2d && pca_analysis.eigenvalues.total()>=3));
3168  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
3169  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3170  }
3171  }
3172  else if(!scan.isEmpty())
3173  {
3174  UERROR("Scan doesn't have normals!");
3175  }
3176  return 0.0f;
3177 }
3178 
3179 float computeNormalsComplexity(
3180  const pcl::PointCloud<pcl::PointNormal> & cloud,
3181  const Transform & t,
3182  bool is2d,
3183  cv::Mat * pcaEigenVectors,
3184  cv::Mat * pcaEigenValues)
3185 {
3186  //Construct a buffer used by the pca analysis
3187  int sz = static_cast<int>(cloud.size()*2);
3188  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
3189  int oi = 0;
3190  bool doTransform = false;
3191  Transform tn;
3192  if(!t.isIdentity() && !t.isNull())
3193  {
3194  tn = t.rotation();
3195  doTransform = true;
3196  }
3197  for (unsigned int i = 0; i < cloud.size(); ++i)
3198  {
3199  const pcl::PointNormal & pt = cloud.at(i);
3200  cv::Point3f n(pt.normal_x, pt.normal_y, pt.normal_z);
3201  if(doTransform)
3202  {
3203  n = util3d::transformPoint(n, tn);
3204  }
3205  if(uIsFinite(pt.normal_x) && uIsFinite(pt.normal_y) && uIsFinite(pt.normal_z))
3206  {
3207  float * ptr = data_normals.ptr<float>(oi++, 0);
3208  ptr[0] = n.x;
3209  ptr[1] = n.y;
3210  if(!is2d)
3211  {
3212  ptr[2] = n.z;
3213  }
3214  }
3215  }
3216  if(oi>1)
3217  {
3218  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3219 
3220  if(pcaEigenVectors)
3221  {
3222  *pcaEigenVectors = pca_analysis.eigenvectors;
3223  }
3224  if(pcaEigenValues)
3225  {
3226  *pcaEigenValues = pca_analysis.eigenvalues;
3227  }
3228 
3229  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
3230  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3231  }
3232  return 0.0f;
3233 }
3234 
3235 float computeNormalsComplexity(
3236  const pcl::PointCloud<pcl::Normal> & normals,
3237  const Transform & t,
3238  bool is2d,
3239  cv::Mat * pcaEigenVectors,
3240  cv::Mat * pcaEigenValues)
3241 {
3242  //Construct a buffer used by the pca analysis
3243  int sz = static_cast<int>(normals.size()*2);
3244  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
3245  int oi = 0;
3246  bool doTransform = false;
3247  Transform tn;
3248  if(!t.isIdentity())
3249  {
3250  tn = t.rotation();
3251  doTransform = true;
3252  }
3253  for (unsigned int i = 0; i < normals.size(); ++i)
3254  {
3255  const pcl::Normal & pt = normals.at(i);
3256  cv::Point3f n(pt.normal_x, pt.normal_y, pt.normal_z);
3257  if(doTransform)
3258  {
3259  n = util3d::transformPoint(n, tn);
3260  }
3261  if(uIsFinite(pt.normal_x) && uIsFinite(pt.normal_y) && uIsFinite(pt.normal_z))
3262  {
3263  float * ptr = data_normals.ptr<float>(oi++, 0);
3264  ptr[0] = n.x;
3265  ptr[1] = n.y;
3266  if(!is2d)
3267  {
3268  ptr[2] = n.z;
3269  }
3270  }
3271  }
3272  if(oi>1)
3273  {
3274  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3275 
3276  if(pcaEigenVectors)
3277  {
3278  *pcaEigenVectors = pca_analysis.eigenvectors;
3279  }
3280  if(pcaEigenValues)
3281  {
3282  *pcaEigenValues = pca_analysis.eigenvalues;
3283  }
3284 
3285  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
3286  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3287  }
3288  return 0.0f;
3289 }
3290 
3291 float computeNormalsComplexity(
3292  const pcl::PointCloud<pcl::PointXYZINormal> & cloud,
3293  const Transform & t,
3294  bool is2d,
3295  cv::Mat * pcaEigenVectors,
3296  cv::Mat * pcaEigenValues)
3297 {
3298  //Construct a buffer used by the pca analysis
3299  int sz = static_cast<int>(cloud.size()*2);
3300  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
3301  int oi = 0;
3302  bool doTransform = false;
3303  Transform tn;
3304  if(!t.isIdentity())
3305  {
3306  tn = t.rotation();
3307  doTransform = true;
3308  }
3309  for (unsigned int i = 0; i < cloud.size(); ++i)
3310  {
3311  const pcl::PointXYZINormal & pt = cloud.at(i);
3312  cv::Point3f n(pt.normal_x, pt.normal_y, pt.normal_z);
3313  if(doTransform)
3314  {
3315  n = util3d::transformPoint(n, tn);
3316  }
3317  if(uIsFinite(pt.normal_x) && uIsFinite(pt.normal_y) && uIsFinite(pt.normal_z))
3318  {
3319  float * ptr = data_normals.ptr<float>(oi++, 0);
3320  ptr[0] = n.x;
3321  ptr[1] = n.y;
3322  if(!is2d)
3323  {
3324  ptr[2] = n.z;
3325  }
3326  }
3327  }
3328  if(oi>1)
3329  {
3330  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3331 
3332  if(pcaEigenVectors)
3333  {
3334  *pcaEigenVectors = pca_analysis.eigenvectors;
3335  }
3336  if(pcaEigenValues)
3337  {
3338  *pcaEigenValues = pca_analysis.eigenvalues;
3339  }
3340 
3341  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
3342  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3343  }
3344  return 0.0f;
3345 }
3346 
3347 float computeNormalsComplexity(
3348  const pcl::PointCloud<pcl::PointXYZRGBNormal> & cloud,
3349  const Transform & t,
3350  bool is2d,
3351  cv::Mat * pcaEigenVectors,
3352  cv::Mat * pcaEigenValues)
3353 {
3354  //Construct a buffer used by the pca analysis
3355  int sz = static_cast<int>(cloud.size()*2);
3356  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
3357  int oi = 0;
3358  bool doTransform = false;
3359  Transform tn;
3360  if(!t.isIdentity())
3361  {
3362  tn = t.rotation();
3363  doTransform = true;
3364  }
3365  for (unsigned int i = 0; i < cloud.size(); ++i)
3366  {
3367  const pcl::PointXYZRGBNormal & pt = cloud.at(i);
3368  cv::Point3f n(pt.normal_x, pt.normal_y, pt.normal_z);
3369  if(doTransform)
3370  {
3371  n = util3d::transformPoint(n, tn);
3372  }
3373  if(uIsFinite(pt.normal_x) && uIsFinite(pt.normal_y) && uIsFinite(pt.normal_z))
3374  {
3375  float * ptr = data_normals.ptr<float>(oi++, 0);
3376  ptr[0] = n.x;
3377  ptr[1] = n.y;
3378  if(!is2d)
3379  {
3380  ptr[2] = n.z;
3381  }
3382  }
3383  }
3384  if(oi>1)
3385  {
3386  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3387 
3388  if(pcaEigenVectors)
3389  {
3390  *pcaEigenVectors = pca_analysis.eigenvectors;
3391  }
3392  if(pcaEigenValues)
3393  {
3394  *pcaEigenValues = pca_analysis.eigenvalues;
3395  }
3396 
3397  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
3398  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3399  }
3400  return 0.0f;
3401 }
3402 
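// --- Illustrative usage sketch (not part of the original file) ---------------
// Minimal example of how the PCA-based complexity above might be interpreted,
// assuming a pcl::PointNormal cloud that already carries valid normals and the
// headers already included at the top of this file. The helper name and the
// 0.03 threshold are illustrative, not RTAB-Map values.
float checkStructureComplexityExample(const pcl::PointCloud<pcl::PointNormal> & cloudWithNormals)
{
	cv::Mat eigenVectors, eigenValues;
	float complexity = util3d::computeNormalsComplexity(
			cloudWithNormals,
			Transform::getIdentity(),
			false,          // is2d: treat normals as full 3D vectors
			&eigenVectors,  // optional outputs, may be left null
			&eigenValues);
	// Near 0: normals nearly co-linear (e.g. a single wall or floor);
	// near 1: normals spread in all directions (well-constrained geometry).
	if(complexity < 0.03f) // illustrative threshold
	{
		UWARN("Low normal complexity (%f): point-to-plane alignment may be degenerate.", complexity);
	}
	return complexity;
}
// -----------------------------------------------------------------------------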
3403 pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr mls(
3404  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
3405  float searchRadius,
3406  int polygonialOrder,
3407  int upsamplingMethod, // NONE, DISTINCT_CLOUD, SAMPLE_LOCAL_PLANE, RANDOM_UNIFORM_DENSITY, VOXEL_GRID_DILATION
3408  float upsamplingRadius, // SAMPLE_LOCAL_PLANE
3409  float upsamplingStep, // SAMPLE_LOCAL_PLANE
3410  int pointDensity, // RANDOM_UNIFORM_DENSITY
3411  float dilationVoxelSize, // VOXEL_GRID_DILATION
3412  int dilationIterations) // VOXEL_GRID_DILATION
3413 {
3414  pcl::IndicesPtr indices(new std::vector<int>);
3415  return mls(cloud,
3416  indices,
3417  searchRadius,
3418  polygonialOrder,
3419  upsamplingMethod,
3420  upsamplingRadius,
3421  upsamplingStep,
3422  pointDensity,
3423  dilationVoxelSize,
3424  dilationIterations);
3425 }
3426 
3427 pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr mls(
3428  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
3429  const pcl::IndicesPtr & indices,
3430  float searchRadius,
3431  int polygonialOrder,
3432  int upsamplingMethod, // NONE, DISTINCT_CLOUD, SAMPLE_LOCAL_PLANE, RANDOM_UNIFORM_DENSITY, VOXEL_GRID_DILATION
3433  float upsamplingRadius, // SAMPLE_LOCAL_PLANE
3434  float upsamplingStep, // SAMPLE_LOCAL_PLANE
3435  int pointDensity, // RANDOM_UNIFORM_DENSITY
3436  float dilationVoxelSize, // VOXEL_GRID_DILATION
3437  int dilationIterations) // VOXEL_GRID_DILATION
3438 {
3439  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud_with_normals(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
3440  pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB>);
3441  if(indices->size())
3442  {
3443  tree->setInputCloud (cloud, indices);
3444  }
3445  else
3446  {
3447  tree->setInputCloud (cloud);
3448  }
3449 
3450  // Init object (second point type is for the normals)
3451  pcl::MovingLeastSquares<pcl::PointXYZRGB, pcl::PointXYZRGBNormal> mls;
3452 
3453  // Set parameters
3454  mls.setComputeNormals (true);
3455  if(polygonialOrder > 0)
3456  {
3457 #if PCL_VERSION_COMPARE(<, 1, 10, 0)
3458  mls.setPolynomialFit (true);
3459 #endif
3460  mls.setPolynomialOrder(polygonialOrder);
3461  }
3462  else
3463  {
3464 #if PCL_VERSION_COMPARE(<, 1, 10, 0)
3465  mls.setPolynomialFit (false);
3466 #else
3467  mls.setPolynomialOrder(1);
3468 #endif
3469  }
3470  UASSERT(upsamplingMethod >= mls.NONE &&
3471  upsamplingMethod <= mls.VOXEL_GRID_DILATION);
3472  mls.setUpsamplingMethod((pcl::MovingLeastSquares<pcl::PointXYZRGB, pcl::PointXYZRGBNormal>::UpsamplingMethod)upsamplingMethod);
3473  mls.setSearchRadius(searchRadius);
3474  mls.setUpsamplingRadius(upsamplingRadius);
3475  mls.setUpsamplingStepSize(upsamplingStep);
3476  mls.setPointDensity(pointDensity);
3477  mls.setDilationVoxelSize(dilationVoxelSize);
3478  mls.setDilationIterations(dilationIterations);
3479 
3480  // Reconstruct
3481  mls.setInputCloud (cloud);
3482  if(indices->size())
3483  {
3484  mls.setIndices(indices);
3485  }
3486  mls.setSearchMethod (tree);
3487  mls.process (*cloud_with_normals);
3488 
3489  // It seems that returned normals are not normalized!? FIXME: Is it a bug only in PCL 1.7.1?
3490  for(unsigned int i=0; i<cloud_with_normals->size(); ++i)
3491  {
3492  Eigen::Vector3f normal(cloud_with_normals->at(i).normal_x, cloud_with_normals->at(i).normal_y, cloud_with_normals->at(i).normal_z);
3493  normal.normalize();
3494  cloud_with_normals->at(i).normal_x = normal[0];
3495  cloud_with_normals->at(i).normal_y = normal[1];
3496  cloud_with_normals->at(i).normal_z = normal[2];
3497  }
3498 
3499  return cloud_with_normals;
3500 }
3501 
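// --- Illustrative usage sketch (not part of the original file) ---------------
// Smooth a colored cloud with the mls() wrapper above and reuse the resulting
// normals for greedy-projection meshing. The 0.05 m search radius and the helper
// name are illustrative, not RTAB-Map defaults; headers of this file are assumed.
pcl::PolygonMesh::Ptr smoothAndMeshExample(const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud)
{
	// MLS smoothing + normal estimation in one pass, no upsampling.
	pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr withNormals =
			util3d::mls(cloud, 0.05f /*searchRadius*/, 2 /*polynomial order*/);
	// Normals are re-normalized by mls() above, so they can be fed directly
	// to util3d::createMesh() (gp3 parameters left at their defaults).
	return util3d::createMesh(withNormals);
}
// -----------------------------------------------------------------------------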
3502 LaserScan adjustNormalsToViewPoint(
3503  const LaserScan & scan,
3504  const Eigen::Vector3f & viewpoint,
3505  bool forceGroundNormalsUp)
3506 {
3507  return adjustNormalsToViewPoint(scan, viewpoint, forceGroundNormalsUp?0.8f:0.0f);
3508 }
3509 LaserScan adjustNormalsToViewPoint(
3510  const LaserScan & scan,
3511  const Eigen::Vector3f & viewpoint,
3512  float groundNormalsUp)
3513 {
3514  if(scan.size() && !scan.is2d() && scan.hasNormals())
3515  {
3516  int nx = scan.getNormalsOffset();
3517  int ny = nx+1;
3518  int nz = ny+1;
3519  cv::Mat output = scan.data().clone();
3520  #pragma omp parallel for
3521  for(int j=0; j<scan.data().rows; ++j)
3522  {
3523  for(int i=0; i<scan.data().cols; ++i)
3524  {
3525  float * ptr = output.ptr<float>(j, i);
3526  if(uIsFinite(ptr[nx]) && uIsFinite(ptr[ny]) && uIsFinite(ptr[nz]))
3527  {
3528  Eigen::Vector3f v = viewpoint - Eigen::Vector3f(ptr[0], ptr[1], ptr[2]);
3529  Eigen::Vector3f n(ptr[nx], ptr[ny], ptr[nz]);
3530 
3531  float result = v.dot(n);
3532  if(result < 0
3533  || (groundNormalsUp>0.0f && ptr[nz] < -groundNormalsUp && ptr[2] < viewpoint[2])) // some far velodyne rays on road can have normals toward ground
3534  {
3535  //reverse normal
3536  ptr[nx] *= -1.0f;
3537  ptr[ny] *= -1.0f;
3538  ptr[nz] *= -1.0f;
3539  }
3540  }
3541  }
3542  }
3543  if(scan.angleIncrement() > 0.0f)
3544  {
3545  return LaserScan(output, scan.format(), scan.rangeMin(), scan.rangeMax(), scan.angleMin(), scan.angleMax(), scan.angleIncrement(), scan.localTransform());
3546  }
3547  else
3548  {
3549  return LaserScan(output, scan.maxPoints(), scan.rangeMax(), scan.format(), scan.localTransform());
3550  }
3551  }
3552  return scan;
3553 }
3554 
3555 template<typename PointNormalT>
3556 void adjustNormalsToViewPointImpl(
3557  typename pcl::PointCloud<PointNormalT>::Ptr & cloud,
3558  const Eigen::Vector3f & viewpoint,
3559  float groundNormalsUp)
3560 {
3561  #pragma omp parallel for
3562  for(int i=0; i<(int)cloud->size(); ++i)
3563  {
3564  pcl::PointXYZ normal(cloud->points[i].normal_x, cloud->points[i].normal_y, cloud->points[i].normal_z);
3565  if(pcl::isFinite(normal))
3566  {
3567  Eigen::Vector3f v = viewpoint - cloud->points[i].getVector3fMap();
3568  Eigen::Vector3f n(normal.x, normal.y, normal.z);
3569 
3570  float result = v.dot(n);
3571  if(result < 0
3572  || (groundNormalsUp>0.0f && normal.z < -groundNormalsUp && cloud->points[i].z < viewpoint[2])) // some far velodyne rays on road can have normals toward ground
3573  {
3574  //reverse normal
3575  cloud->points[i].normal_x *= -1.0f;
3576  cloud->points[i].normal_y *= -1.0f;
3577  cloud->points[i].normal_z *= -1.0f;
3578  }
3579  }
3580  }
3581 }
3582 
3583 void adjustNormalsToViewPoint(
3584  pcl::PointCloud<pcl::PointNormal>::Ptr & cloud,
3585  const Eigen::Vector3f & viewpoint,
3586  bool forceGroundNormalsUp)
3587 {
3588  adjustNormalsToViewPoint(cloud, viewpoint, forceGroundNormalsUp?0.8f:0.0f);
3589 }
3590 void adjustNormalsToViewPoint(
3591  pcl::PointCloud<pcl::PointNormal>::Ptr & cloud,
3592  const Eigen::Vector3f & viewpoint,
3593  float groundNormalsUp)
3594 {
3595  adjustNormalsToViewPointImpl<pcl::PointNormal>(cloud, viewpoint, groundNormalsUp);
3596 }
3597 
3598 void adjustNormalsToViewPoint(
3599  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloud,
3600  const Eigen::Vector3f & viewpoint,
3601  bool forceGroundNormalsUp)
3602 {
3603  adjustNormalsToViewPoint(cloud, viewpoint, forceGroundNormalsUp?0.8f:0.0f);
3604 }
3605 void adjustNormalsToViewPoint(
3606  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloud,
3607  const Eigen::Vector3f & viewpoint,
3608  float groundNormalsUp)
3609 {
3610  adjustNormalsToViewPointImpl<pcl::PointXYZRGBNormal>(cloud, viewpoint, groundNormalsUp);
3611 }
3612 
3613 void adjustNormalsToViewPoint(
3614  pcl::PointCloud<pcl::PointXYZINormal>::Ptr & cloud,
3615  const Eigen::Vector3f & viewpoint,
3616  bool forceGroundNormalsUp)
3617 {
3618  adjustNormalsToViewPoint(cloud, viewpoint, forceGroundNormalsUp?0.8f:0.0f);
3619 }
3620 void adjustNormalsToViewPoint(
3621  pcl::PointCloud<pcl::PointXYZINormal>::Ptr & cloud,
3622  const Eigen::Vector3f & viewpoint,
3623  float groundNormalsUp)
3624 {
3625  adjustNormalsToViewPointImpl<pcl::PointXYZINormal>(cloud, viewpoint, groundNormalsUp);
3626 }
3627 
3628 
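// --- Illustrative usage sketch (not part of the original file) ---------------
// Re-orient all normals of a cloud toward the sensor using the single-viewpoint
// overloads above, assuming the cloud is expressed in the sensor frame (viewpoint
// at the origin). The helper name is illustrative.
void orientNormalsTowardsSensorExample(pcl::PointCloud<pcl::PointNormal>::Ptr & cloudWithNormals)
{
	Eigen::Vector3f viewpoint(0.0f, 0.0f, 0.0f);
	// Passing 0.8f reproduces the forceGroundNormalsUp=true behavior of the bool
	// overload: strongly downward normals on points below the viewpoint are also flipped.
	util3d::adjustNormalsToViewPoint(cloudWithNormals, viewpoint, 0.8f);
}
// -----------------------------------------------------------------------------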
3629 template<typename PointT>
3630 void adjustNormalsToViewPointsImpl(
3631  const std::map<int, Transform> & poses,
3632  const pcl::PointCloud<pcl::PointXYZ>::Ptr & rawCloud,
3633  const std::vector<int> & rawCameraIndices,
3634  typename pcl::PointCloud<PointT>::Ptr & cloud,
3635  float groundNormalsUp)
3636 {
3637  if(poses.size() && rawCloud->size() && rawCloud->size() == rawCameraIndices.size() && cloud->size())
3638  {
3639  pcl::search::KdTree<pcl::PointXYZ>::Ptr rawTree (new pcl::search::KdTree<pcl::PointXYZ>);
3640  rawTree->setInputCloud (rawCloud);
3641 
3642  #pragma omp parallel for
3643  for(int i=0; i<(int)cloud->size(); ++i)
3644  {
3645  pcl::PointXYZ normal(cloud->points[i].normal_x, cloud->points[i].normal_y, cloud->points[i].normal_z);
3646  if(pcl::isFinite(normal))
3647  {
3648  std::vector<int> indices;
3649  std::vector<float> dist;
3650  rawTree->nearestKSearch(pcl::PointXYZ(cloud->points[i].x, cloud->points[i].y, cloud->points[i].z), 1, indices, dist);
3651  UASSERT(indices.size() == 1);
3652  if(indices.size() && indices[0]>=0)
3653  {
3654  const Transform & p = poses.at(rawCameraIndices[indices[0]]);
3655  pcl::PointXYZ viewpoint(p.x(), p.y(), p.z());
3656  Eigen::Vector3f v = viewpoint.getVector3fMap() - cloud->points[i].getVector3fMap();
3657 
3658  Eigen::Vector3f n(normal.x, normal.y, normal.z);
3659 
3660  float result = v.dot(n);
3661  if(result < 0 ||
3662  (groundNormalsUp>0.0f && normal.z < -groundNormalsUp && cloud->points[i].z < viewpoint.z)) // some far velodyne rays on road can have normals toward ground
3663  {
3664  //reverse normal
3665  cloud->points[i].normal_x *= -1.0f;
3666  cloud->points[i].normal_y *= -1.0f;
3667  cloud->points[i].normal_z *= -1.0f;
3668  }
3669  }
3670  else
3671  {
3672  UWARN("Camera viewpoint not found for point %d", i);
3673  }
3674  }
3675  }
3676  }
3677 }
3678 
3679 void adjustNormalsToViewPoints(
3680  const std::map<int, Transform> & poses,
3681  const pcl::PointCloud<pcl::PointXYZ>::Ptr & rawCloud,
3682  const std::vector<int> & rawCameraIndices,
3683  pcl::PointCloud<pcl::PointNormal>::Ptr & cloud,
3684  float groundNormalsUp)
3685 {
3686  adjustNormalsToViewPointsImpl<pcl::PointNormal>(poses, rawCloud, rawCameraIndices, cloud, groundNormalsUp);
3687 }
3688 
3689 void adjustNormalsToViewPoints(
3690  const std::map<int, Transform> & poses,
3691  const pcl::PointCloud<pcl::PointXYZ>::Ptr & rawCloud,
3692  const std::vector<int> & rawCameraIndices,
3693  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloud,
3694  float groundNormalsUp)
3695 {
3696  adjustNormalsToViewPointsImpl<pcl::PointXYZRGBNormal>(poses, rawCloud, rawCameraIndices, cloud, groundNormalsUp);
3697 }
3698 
3699 void adjustNormalsToViewPoints(
3700  const std::map<int, Transform> & poses,
3701  const pcl::PointCloud<pcl::PointXYZ>::Ptr & rawCloud,
3702  const std::vector<int> & rawCameraIndices,
3703  pcl::PointCloud<pcl::PointXYZINormal>::Ptr & cloud,
3704  float groundNormalsUp)
3705 {
3706  adjustNormalsToViewPointsImpl<pcl::PointXYZINormal>(poses, rawCloud, rawCameraIndices, cloud, groundNormalsUp);
3707 }
3708 
3709 void adjustNormalsToViewPoints(
3710  const std::map<int, Transform> & viewpoints,
3711  const LaserScan & rawScan,
3712  const std::vector<int> & viewpointIds,
3713  LaserScan & scan,
3714  float groundNormalsUp)
3715 {
3716  UDEBUG("viewpoints=%d, rawScan=%d, viewpointIds=%d, scan=%d", (int)viewpoints.size(), (int)rawScan.size(), (int)viewpointIds.size(), (int)scan.size());
3717  if(viewpoints.size() && rawScan.size() && rawScan.size() == (int)viewpointIds.size() && scan.size() && scan.hasNormals())
3718  {
3719  pcl::PointCloud<pcl::PointXYZ>::Ptr rawCloud = util3d::laserScanToPointCloud(rawScan);
3720  pcl::search::KdTree<pcl::PointXYZ>::Ptr rawTree (new pcl::search::KdTree<pcl::PointXYZ>);
3721  rawTree->setInputCloud (rawCloud);
3722  #pragma omp parallel for
3723  for(int i=0; i<scan.size(); ++i)
3724  {
3725  pcl::PointNormal point = util3d::laserScanToPointNormal(scan, i);
3726  pcl::PointXYZ normal(point.normal_x, point.normal_y, point.normal_z);
3727  if(pcl::isFinite(normal))
3728  {
3729  std::vector<int> indices;
3730  std::vector<float> dist;
3731  rawTree->nearestKSearch(pcl::PointXYZ(point.x, point.y, point.z), 1, indices, dist);
3732  if(indices.size() && indices[0]>=0)
3733  {
3734  UASSERT_MSG(indices[0]<(int)viewpointIds.size(), uFormat("indices[0]=%d viewpointIds.size()=%d", indices[0], (int)viewpointIds.size()).c_str());
3735  UASSERT(uContains(viewpoints, viewpointIds[indices[0]]));
3736  Transform p = viewpoints.at(viewpointIds[indices[0]]);
3737  pcl::PointXYZ viewpoint(p.x(), p.y(), p.z());
3738  Eigen::Vector3f v = viewpoint.getVector3fMap() - point.getVector3fMap();
3739 
3740  Eigen::Vector3f n(normal.x, normal.y, normal.z);
3741 
3742  float result = v.dot(n);
3743  if(result < 0 ||
3744  (groundNormalsUp>0.0f && normal.z < -groundNormalsUp && point.z < viewpoint.z)) // some far velodyne rays on road can have normals toward ground
3745  {
3746  //reverse normal
3747  scan.field(i, scan.getNormalsOffset()) *= -1.0f;
3748  scan.field(i, scan.getNormalsOffset()+1) *= -1.0f;
3749  scan.field(i, scan.getNormalsOffset()+2) *= -1.0f;
3750  }
3751  }
3752  else
3753  {
3754  UWARN("Camera viewpoint not found for point %d!?", i);
3755  }
3756  }
3757  }
3758  }
3759 }
3760 
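// --- Illustrative usage sketch (not part of the original file) ---------------
// Multi-viewpoint variant: re-orient the normals of an assembled map cloud using
// the pose of the node that acquired each point. "nodeOfRawPoint" is a hypothetical
// vector (node id per raw point) that would be built while assembling the cloud;
// the helper name and the 0.8f value are illustrative.
void orientAssembledCloudExample(
		const std::map<int, Transform> & poses,                 // node id -> pose
		const pcl::PointCloud<pcl::PointXYZ>::Ptr & rawCloud,   // assembled, unfiltered points
		const std::vector<int> & nodeOfRawPoint,                // hypothetical per-point node ids
		pcl::PointCloud<pcl::PointNormal>::Ptr & mapCloud)      // assembled cloud with normals
{
	// For each point of mapCloud, the nearest raw point tells which viewpoint acquired
	// it; normals pointing away from that viewpoint are flipped.
	util3d::adjustNormalsToViewPoints(poses, rawCloud, nodeOfRawPoint, mapCloud, 0.8f);
}
// -----------------------------------------------------------------------------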
3761 pcl::PolygonMesh::Ptr meshDecimation(const pcl::PolygonMesh::Ptr & mesh, float factor)
3762 {
3763  pcl::PolygonMesh::Ptr output(new pcl::PolygonMesh);
3764 #ifndef DISABLE_VTK
3765  pcl::MeshQuadricDecimationVTK mqd;
3766  mqd.setTargetReductionFactor(factor);
3767  mqd.setInputMesh(mesh);
3768  mqd.process (*output);
3769 #else
3770  UWARN("RTAB-Map is not built with VTK module so mesh decimation cannot be used!");
3771  *output = *mesh;
3772 #endif
3773  return output;
3774 }
3775 
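// --- Illustrative usage sketch (not part of the original file) ---------------
// Reduce a mesh to roughly half of its polygons with the wrapper above; the 0.5
// factor and the helper name are illustrative. Requires an RTAB-Map build with VTK,
// otherwise a warning is printed and a copy of the input mesh is returned.
pcl::PolygonMesh::Ptr decimateHalfExample(const pcl::PolygonMesh::Ptr & mesh)
{
	// The factor is forwarded to pcl::MeshQuadricDecimationVTK::setTargetReductionFactor().
	return util3d::meshDecimation(mesh, 0.5f);
}
// -----------------------------------------------------------------------------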
3776 bool intersectRayTriangle(
3777  const Eigen::Vector3f & p,
3778  const Eigen::Vector3f & dir,
3779  const Eigen::Vector3f & v0,
3780  const Eigen::Vector3f & v1,
3781  const Eigen::Vector3f & v2,
3782  float & distance,
3783  Eigen::Vector3f & normal)
3784 {
3785  // get triangle edge cv::Vec3fs and plane normal
3786  const Eigen::Vector3f u = v1-v0;
3787  const Eigen::Vector3f v = v2-v0;
3788  normal = u.cross(v); // cross product
3789  if (normal == Eigen::Vector3f(0,0,0)) // triangle is degenerate
3790  return false; // do not deal with this case
3791 
3792  const float denominator = normal.dot(dir);
3793  if (fabs(denominator) < 10e-9) // ray is parallel to triangle plane
3794  return false;
3795 
3796  // get intersect of ray with triangle plane
3797  distance = normal.dot(v0 - p) / denominator;
3798  if (distance < 0.0) // ray goes away from triangle
3799  return false;
3800 
3801  // is I inside T?
3802  float uu, uv, vv, wu, wv, D;
3803  uu = u.dot(u);
3804  uv = u.dot(v);
3805  vv = v.dot(v);
3806  const Eigen::Vector3f w = p + dir * distance - v0;
3807  wu = w.dot(u);
3808  wv = w.dot(v);
3809  D = uv * uv - uu * vv;
3810 
3811  // get and test parametric coords
3812  float s, t;
3813  s = (uv * wv - vv * wu) / D;
3814  if (s < 0.0 || s > 1.0) // I is outside T
3815  return false;
3816  t = (uv * wu - uu * wv) / D;
3817  if (t < 0.0 || (s + t) > 1.0) // I is outside T
3818  return false;
3819 
3820  return true; // I is in T
3821 }
3822 
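// --- Illustrative usage sketch (not part of the original file) ---------------
// Cast a ray from the origin along +x against a triangle lying in the x=1 plane.
// All values and the helper name are illustrative; note that "normal" is returned
// unnormalized (the raw cross product of the triangle edges).
void intersectRayTriangleExample()
{
	const Eigen::Vector3f origin(0.0f, 0.0f, 0.0f);
	const Eigen::Vector3f dir(1.0f, 0.0f, 0.0f); // assumed normalized so that distance is metric
	const Eigen::Vector3f v0(1.0f, -1.0f, -1.0f);
	const Eigen::Vector3f v1(1.0f,  1.0f, -1.0f);
	const Eigen::Vector3f v2(1.0f,  0.0f,  1.0f);
	float distance = 0.0f;
	Eigen::Vector3f normal;
	if(util3d::intersectRayTriangle(origin, dir, v0, v1, v2, distance, normal))
	{
		// Here the ray hits the triangle at (1,0,0), so distance == 1.0f and
		// normal == (4,0,0), i.e. the unnormalized edge cross product.
		UINFO("Hit at distance %f", distance);
	}
}
// -----------------------------------------------------------------------------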
3823 }
3824 
3825 }