util3d_surface.cpp
1 /*
2 Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
3 All rights reserved.
4 
5 Redistribution and use in source and binary forms, with or without
6 modification, are permitted provided that the following conditions are met:
7  * Redistributions of source code must retain the above copyright
8  notice, this list of conditions and the following disclaimer.
9  * Redistributions in binary form must reproduce the above copyright
10  notice, this list of conditions and the following disclaimer in the
11  documentation and/or other materials provided with the distribution.
12  * Neither the name of the Universite de Sherbrooke nor the
13  names of its contributors may be used to endorse or promote products
14  derived from this software without specific prior written permission.
15 
16 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17 ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
20 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
21 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
22 LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
23 ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */
27 
31 #include "rtabmap/core/util3d.h"
32 #include "rtabmap/core/util2d.h"
33 #include "rtabmap/core/Memory.h"
34 #include "rtabmap/core/DBDriver.h"
38 #include "rtabmap/utilite/UFile.h"
40 #include "rtabmap/utilite/UMath.h"
41 #include "rtabmap/utilite/UTimer.h"
42 #include <opencv2/core/core_c.h>
43 #include <opencv2/imgproc/types_c.h>
44 #include <pcl/search/kdtree.h>
45 #include <pcl/surface/gp3.h>
46 #include <pcl/features/normal_3d_omp.h>
47 #include <pcl/surface/mls.h>
49 #include <pcl/features/integral_image_normal.h>
50 
51 #ifdef RTABMAP_ALICE_VISION
52 #include <aliceVision/sfmData/SfMData.hpp>
53 #include <aliceVision/sfmDataIO/sfmDataIO.hpp>
54 #include <aliceVision/mesh/Mesh.hpp>
55 #include <aliceVision/mesh/Texturing.hpp>
56 #include <aliceVision/camera/Pinhole.hpp>
57 #include <boost/algorithm/string.hpp>
58 using namespace aliceVision;
59 #endif
60 
61 #ifndef DISABLE_VTK
62 #include <pcl/surface/vtk_smoothing/vtk_mesh_quadric_decimation.h>
63 #endif
64 
65 #if PCL_VERSION_COMPARE(>, 1, 11, 1)
66 #include <pcl/types.h>
67 #endif
68 #if PCL_VERSION_COMPARE(<, 1, 8, 0)
70 #else
71 #include <pcl/surface/organized_fast_mesh.h>
72 #include <pcl/surface/impl/marching_cubes.hpp>
73 #include <pcl/surface/impl/organized_fast_mesh.hpp>
74 #include <pcl/impl/instantiate.hpp>
75 #include <pcl/point_types.h>
76 
77 // Instantiations of specific point types
78 PCL_INSTANTIATE(OrganizedFastMesh, (pcl::PointXYZRGBNormal))
79 
80 #include <pcl/features/impl/normal_3d_omp.hpp>
81 #if PCL_VERSION_COMPARE(<=, 1, 8, 0)
82 #ifdef PCL_ONLY_CORE_POINT_TYPES
83 PCL_INSTANTIATE_PRODUCT(NormalEstimationOMP, ((pcl::PointXYZRGB))((pcl::Normal)))
84 #endif
85 #endif
86 #endif
87 
88 namespace rtabmap
89 {
90 
91 namespace util3d
92 {
93 
94 void createPolygonIndexes(
95  const std::vector<pcl::Vertices> & polygons,
96  int cloudSize,
97  std::vector<std::set<int> > & neighbors,
98  std::vector<std::set<int> > & vertexToPolygons)
99 {
100  vertexToPolygons = std::vector<std::set<int> >(cloudSize);
101  neighbors = std::vector<std::set<int> >(polygons.size());
102 
103  for(unsigned int i=0; i<polygons.size(); ++i)
104  {
105  std::set<int> vertices(polygons[i].vertices.begin(), polygons[i].vertices.end());
106 
107  for(unsigned int j=0; j<polygons[i].vertices.size(); ++j)
108  {
109  int v = polygons[i].vertices.at(j);
110  for(std::set<int>::iterator iter=vertexToPolygons[v].begin(); iter!=vertexToPolygons[v].end(); ++iter)
111  {
112  int numSharedVertices = 0;
113  for(unsigned int k=0; k<polygons.at(*iter).vertices.size() && numSharedVertices<2; ++k)
114  {
115  if(vertices.find(polygons.at(*iter).vertices.at(k)) != vertices.end())
116  {
117  ++numSharedVertices;
118  }
119  }
120  if(numSharedVertices >= 2)
121  {
122  neighbors[*iter].insert(i);
123  neighbors[i].insert(*iter);
124  }
125  }
126  vertexToPolygons[v].insert(i);
127  }
128  }
129 }
130 
131 std::list<std::list<int> > clusterPolygons(
132  const std::vector<std::set<int> > & neighborPolygons,
133  int minClusterSize)
134 {
135  std::set<int> polygonsChecked;
136 
137  std::list<std::list<int> > clusters;
138 
139  for(unsigned int i=0; i<neighborPolygons.size(); ++i)
140  {
141  if(polygonsChecked.find(i) == polygonsChecked.end())
142  {
143  std::list<int> currentCluster;
144  currentCluster.push_back(i);
145  polygonsChecked.insert(i);
146 
147  for(std::list<int>::iterator iter=currentCluster.begin(); iter!=currentCluster.end(); ++iter)
148  {
149  // get neighbor polygons
150  std::set<int> neighbors = neighborPolygons[*iter];
151  for(std::set<int>::iterator jter=neighbors.begin(); jter!=neighbors.end(); ++jter)
152  {
153  if(polygonsChecked.insert(*jter).second)
154  {
155  currentCluster.push_back(*jter);
156  }
157  }
158  }
159  if((int)currentCluster.size() > minClusterSize)
160  {
161  clusters.push_back(currentCluster);
162  }
163  }
164  }
165  return clusters;
166 }
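// Usage sketch (this is exactly how the two helpers are combined further below
// in the texture-mesh cleanup code): build the polygon adjacency structures,
// then keep only clusters of connected polygons larger than a minimum size.
//
//   std::vector<std::set<int> > neighbors;
//   std::vector<std::set<int> > vertexToPolygons;
//   util3d::createPolygonIndexes(polygons, (int)cloud->size(), neighbors, vertexToPolygons);
//   std::list<std::list<int> > clusters = util3d::clusterPolygons(neighbors, 50); // 50 = example minimum cluster size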
167 
168 std::vector<pcl::Vertices> organizedFastMesh(
169  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
170  double angleTolerance,
171  bool quad,
172  int trianglePixelSize,
173  const Eigen::Vector3f & viewpoint)
174 {
175  UDEBUG("size=%d angle=%f quad=%d triangleSize=%d", (int)cloud->size(), angleTolerance, quad?1:0, trianglePixelSize);
176  UASSERT(cloud->is_dense == false);
177  UASSERT(cloud->width > 1 && cloud->height > 1);
178 
179  pcl::OrganizedFastMesh<pcl::PointXYZ> ofm;
180  ofm.setTrianglePixelSize (trianglePixelSize);
181  ofm.setTriangulationType (quad?pcl::OrganizedFastMesh<pcl::PointXYZ>::QUAD_MESH:pcl::OrganizedFastMesh<pcl::PointXYZ>::TRIANGLE_ADAPTIVE_CUT);
182  ofm.setInputCloud (cloud);
183  ofm.setAngleTolerance(angleTolerance);
184  ofm.setViewpoint(viewpoint);
185 
186  std::vector<pcl::Vertices> vertices;
187  ofm.reconstruct (vertices);
188 
189  if(quad)
190  {
191  //flip all polygons (right handed)
192  std::vector<pcl::Vertices> output(vertices.size());
193  for(unsigned int i=0; i<vertices.size(); ++i)
194  {
195  output[i].vertices.resize(4);
196  output[i].vertices[0] = vertices[i].vertices[0];
197  output[i].vertices[3] = vertices[i].vertices[1];
198  output[i].vertices[2] = vertices[i].vertices[2];
199  output[i].vertices[1] = vertices[i].vertices[3];
200  }
201  return output;
202  }
203 
204  return vertices;
205 }
206 std::vector<pcl::Vertices> organizedFastMesh(
207  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
208  double angleTolerance,
209  bool quad,
210  int trianglePixelSize,
211  const Eigen::Vector3f & viewpoint)
212 {
213  UDEBUG("size=%d angle=%f quad=%d triangleSize=%d", (int)cloud->size(), angleTolerance, quad?1:0, trianglePixelSize);
214  UASSERT(cloud->is_dense == false);
215  UASSERT(cloud->width > 1 && cloud->height > 1);
216 
217  pcl::OrganizedFastMesh<pcl::PointXYZRGB> ofm;
218  ofm.setTrianglePixelSize (trianglePixelSize);
219  ofm.setTriangulationType (quad?pcl::OrganizedFastMesh<pcl::PointXYZRGB>::QUAD_MESH:pcl::OrganizedFastMesh<pcl::PointXYZRGB>::TRIANGLE_ADAPTIVE_CUT);
220  ofm.setInputCloud (cloud);
221  ofm.setAngleTolerance(angleTolerance);
222  ofm.setViewpoint(viewpoint);
223 
224  std::vector<pcl::Vertices> vertices;
225  ofm.reconstruct (vertices);
226 
227  if(quad)
228  {
229  //flip all polygons (right handed)
230  std::vector<pcl::Vertices> output(vertices.size());
231  for(unsigned int i=0; i<vertices.size(); ++i)
232  {
233  output[i].vertices.resize(4);
234  output[i].vertices[0] = vertices[i].vertices[0];
235  output[i].vertices[3] = vertices[i].vertices[1];
236  output[i].vertices[2] = vertices[i].vertices[2];
237  output[i].vertices[1] = vertices[i].vertices[3];
238  }
239  return output;
240  }
241 
242  return vertices;
243 }
244 std::vector<pcl::Vertices> organizedFastMesh(
245  const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloud,
246  double angleTolerance,
247  bool quad,
248  int trianglePixelSize,
249  const Eigen::Vector3f & viewpoint)
250 {
251  UDEBUG("size=%d angle=%f quad=%d triangleSize=%d", (int)cloud->size(), angleTolerance, quad?1:0, trianglePixelSize);
252  UASSERT(cloud->is_dense == false);
253  UASSERT(cloud->width > 1 && cloud->height > 1);
254 
255  pcl::OrganizedFastMesh<pcl::PointXYZRGBNormal> ofm;
256  ofm.setTrianglePixelSize (trianglePixelSize);
257  ofm.setTriangulationType (quad?pcl::OrganizedFastMesh<pcl::PointXYZRGBNormal>::QUAD_MESH:pcl::OrganizedFastMesh<pcl::PointXYZRGBNormal>::TRIANGLE_ADAPTIVE_CUT);
258  ofm.setInputCloud (cloud);
259  ofm.setAngleTolerance(angleTolerance);
260  ofm.setViewpoint(viewpoint);
261 
262  std::vector<pcl::Vertices> vertices;
263  ofm.reconstruct (vertices);
264 
265  if(quad)
266  {
267  //flip all polygons (right handed)
268  std::vector<pcl::Vertices> output(vertices.size());
269  for(unsigned int i=0; i<vertices.size(); ++i)
270  {
271  output[i].vertices.resize(4);
272  output[i].vertices[0] = vertices[i].vertices[0];
273  output[i].vertices[3] = vertices[i].vertices[1];
274  output[i].vertices[2] = vertices[i].vertices[2];
275  output[i].vertices[1] = vertices[i].vertices[3];
276  }
277  return output;
278  }
279 
280  return vertices;
281 }
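// Usage sketch (assumes an organized, non-dense cloud, e.g. the output of a
// depth-image projection with the organized structure preserved; parameter
// values below are illustrative only):
//
//   Eigen::Vector3f viewpoint(0.0f, 0.0f, 0.0f);
//   std::vector<pcl::Vertices> polygons = util3d::organizedFastMesh(
//       cloud,        // organized pcl::PointCloud<pcl::PointXYZRGB>::Ptr
//       M_PI/16,      // angleTolerance
//       false,        // quad (false = triangles)
//       10,           // trianglePixelSize
//       viewpoint);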
282 
283 void appendMesh(
284  pcl::PointCloud<pcl::PointXYZRGBNormal> & cloudA,
285  std::vector<pcl::Vertices> & polygonsA,
286  const pcl::PointCloud<pcl::PointXYZRGBNormal> & cloudB,
287  const std::vector<pcl::Vertices> & polygonsB)
288 {
289  UDEBUG("cloudA=%d polygonsA=%d cloudB=%d polygonsB=%d", (int)cloudA.size(), (int)polygonsA.size(), (int)cloudB.size(), (int)polygonsB.size());
290  UASSERT(!cloudA.isOrganized() && !cloudB.isOrganized());
291 
292  int sizeA = (int)cloudA.size();
293  cloudA += cloudB;
294 
295  int sizePolygonsA = (int)polygonsA.size();
296  polygonsA.resize(sizePolygonsA+polygonsB.size());
297 
298  for(unsigned int i=0; i<polygonsB.size(); ++i)
299  {
300  pcl::Vertices vertices = polygonsB[i];
301  for(unsigned int j=0; j<vertices.vertices.size(); ++j)
302  {
303  vertices.vertices[j] += sizeA;
304  }
305  polygonsA[i+sizePolygonsA] = vertices;
306  }
307 }
308 
309 void appendMesh(
310  pcl::PointCloud<pcl::PointXYZRGB> & cloudA,
311  std::vector<pcl::Vertices> & polygonsA,
312  const pcl::PointCloud<pcl::PointXYZRGB> & cloudB,
313  const std::vector<pcl::Vertices> & polygonsB)
314 {
315  UDEBUG("cloudA=%d polygonsA=%d cloudB=%d polygonsB=%d", (int)cloudA.size(), (int)polygonsA.size(), (int)cloudB.size(), (int)polygonsB.size());
316  UASSERT(!cloudA.isOrganized() && !cloudB.isOrganized());
317 
318  int sizeA = (int)cloudA.size();
319  cloudA += cloudB;
320 
321  int sizePolygonsA = (int)polygonsA.size();
322  polygonsA.resize(sizePolygonsA+polygonsB.size());
323 
324  for(unsigned int i=0; i<polygonsB.size(); ++i)
325  {
326  pcl::Vertices vertices = polygonsB[i];
327  for(unsigned int j=0; j<vertices.vertices.size(); ++j)
328  {
329  vertices.vertices[j] += sizeA;
330  }
331  polygonsA[i+sizePolygonsA] = vertices;
332  }
333 }
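// Usage sketch: append mesh B into mesh A in place. Vertex indices of B's
// polygons are shifted by the original size of cloud A; both clouds must be
// dense (not organized).
//
//   util3d::appendMesh(*cloudA, polygonsA, *cloudB, polygonsB);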
334 
335 std::vector<int> filterNotUsedVerticesFromMesh(
336  const pcl::PointCloud<pcl::PointXYZRGBNormal> & cloud,
337  const std::vector<pcl::Vertices> & polygons,
338  pcl::PointCloud<pcl::PointXYZRGBNormal> & outputCloud,
339  std::vector<pcl::Vertices> & outputPolygons)
340 {
341  UDEBUG("size=%d polygons=%d", (int)cloud.size(), (int)polygons.size());
342  std::map<int, int> addedVertices; //<oldIndex, newIndex>
343  std::vector<int> output; //<oldIndex>
344  output.resize(cloud.size());
345  outputCloud.resize(cloud.size());
346  outputCloud.is_dense = true;
347  outputPolygons.resize(polygons.size());
348  int oi = 0;
349  for(unsigned int i=0; i<polygons.size(); ++i)
350  {
351  pcl::Vertices & v = outputPolygons[i];
352  v.vertices.resize(polygons[i].vertices.size());
353  for(unsigned int j=0; j<polygons[i].vertices.size(); ++j)
354  {
355  std::map<int, int>::iterator iter = addedVertices.find(polygons[i].vertices[j]);
356  if(iter == addedVertices.end())
357  {
358  outputCloud[oi] = cloud.at(polygons[i].vertices[j]);
359  addedVertices.insert(std::make_pair(polygons[i].vertices[j], oi));
360  output[oi] = polygons[i].vertices[j];
361  v.vertices[j] = oi++;
362  }
363  else
364  {
365  v.vertices[j] = iter->second;
366  }
367  }
368  }
369  outputCloud.resize(oi);
370  output.resize(oi);
371 
372  return output;
373 }
374 
375 std::vector<int> filterNotUsedVerticesFromMesh(
376  const pcl::PointCloud<pcl::PointXYZRGB> & cloud,
377  const std::vector<pcl::Vertices> & polygons,
378  pcl::PointCloud<pcl::PointXYZRGB> & outputCloud,
379  std::vector<pcl::Vertices> & outputPolygons)
380 {
381  UDEBUG("size=%d polygons=%d", (int)cloud.size(), (int)polygons.size());
382  std::map<int, int> addedVertices; //<oldIndex, newIndex>
383  std::vector<int> output; //<oldIndex>
384  output.resize(cloud.size());
385  outputCloud.resize(cloud.size());
386  outputCloud.is_dense = true;
387  outputPolygons.resize(polygons.size());
388  int oi = 0;
389  for(unsigned int i=0; i<polygons.size(); ++i)
390  {
391  pcl::Vertices & v = outputPolygons[i];
392  v.vertices.resize(polygons[i].vertices.size());
393  for(unsigned int j=0; j<polygons[i].vertices.size(); ++j)
394  {
395  std::map<int, int>::iterator iter = addedVertices.find(polygons[i].vertices[j]);
396  if(iter == addedVertices.end())
397  {
398  outputCloud[oi] = cloud.at(polygons[i].vertices[j]);
399  addedVertices.insert(std::make_pair(polygons[i].vertices[j], oi));
400  output[oi] = polygons[i].vertices[j];
401  v.vertices[j] = oi++;
402  }
403  else
404  {
405  v.vertices[j] = iter->second;
406  }
407  }
408  }
409  outputCloud.resize(oi);
410  output.resize(oi);
411 
412  return output;
413 }
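// Usage sketch: compact a mesh so the output cloud keeps only vertices
// referenced by at least one polygon. The returned vector maps each new vertex
// index back to its index in the input cloud.
//
//   pcl::PointCloud<pcl::PointXYZRGB> denseCloud;
//   std::vector<pcl::Vertices> densePolygons;
//   std::vector<int> newToOldIndices = util3d::filterNotUsedVerticesFromMesh(
//       *cloud, polygons, denseCloud, densePolygons);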
414 
415 std::vector<int> filterNaNPointsFromMesh(
416  const pcl::PointCloud<pcl::PointXYZRGB> & cloud,
417  const std::vector<pcl::Vertices> & polygons,
418  pcl::PointCloud<pcl::PointXYZRGB> & outputCloud,
419  std::vector<pcl::Vertices> & outputPolygons)
420 {
421  UDEBUG("size=%d polygons=%d", (int)cloud.size(), (int)polygons.size());
422  std::map<int, int> addedVertices; //<oldIndex, newIndex>
423  std::vector<int> output; //<oldIndex>
424  output.resize(cloud.size());
425  outputCloud.resize(cloud.size());
426  outputCloud.is_dense = true;
427  std::vector<int> organizedToDense(cloud.size(), -1);
428 
429  int oi = 0;
430  for(unsigned int i=0; i<cloud.size(); ++i)
431  {
432  if(pcl::isFinite(cloud.at(i)))
433  {
434  outputCloud.at(oi) = cloud.at(i);
435  output[oi] = i;
436  organizedToDense[i] = oi;
437  ++oi;
438  }
439  }
440  outputCloud.resize(oi);
441  output.resize(oi);
442 
443  // remap polygons to dense cloud
444  outputPolygons = polygons;
445  for(unsigned int i=0; i<outputPolygons.size(); ++i)
446  {
447  pcl::Vertices & v = outputPolygons[i];
448  for(unsigned int j=0; j<v.vertices.size(); ++j)
449  {
450  UASSERT(organizedToDense[v.vertices[j]] >= 0);
451  v.vertices[j] = organizedToDense[v.vertices[j]];
452  }
453  }
454 
455  return output;
456 }
457 
458 std::vector<pcl::Vertices> filterCloseVerticesFromMesh(
459  const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud,
460  const std::vector<pcl::Vertices> & polygons,
461  float radius,
462  float angle, // FIXME angle not used
463  bool keepLatestInRadius)
464 {
465  UDEBUG("size=%d polygons=%d radius=%f angle=%f keepLatest=%d",
466  (int)cloud->size(), (int)polygons.size(), radius, angle, keepLatestInRadius?1:0);
467  std::vector<pcl::Vertices> outputPolygons;
468  pcl::KdTreeFLANN<pcl::PointXYZRGBNormal>::Ptr kdtree(new pcl::KdTreeFLANN<pcl::PointXYZRGBNormal>);
469  kdtree->setInputCloud(cloud);
470 
471  std::map<int, int> verticesDone;
472  outputPolygons = polygons;
473  for(unsigned int i=0; i<outputPolygons.size(); ++i)
474  {
475  pcl::Vertices & polygon = outputPolygons[i];
476  for(unsigned int j=0; j<polygon.vertices.size(); ++j)
477  {
478  std::map<int, int>::iterator iter = verticesDone.find(polygon.vertices[j]);
479  if(iter != verticesDone.end())
480  {
481  polygon.vertices[j] = iter->second;
482  }
483  else
484  {
485  std::vector<int> kIndices;
486  std::vector<float> kDistances;
487  kdtree->radiusSearch(polygon.vertices[j], radius, kIndices, kDistances);
488  if(kIndices.size())
489  {
490  int reference = -1;
491  for(unsigned int z=0; z<kIndices.size(); ++z)
492  {
493  if(reference == -1)
494  {
495  reference = kIndices[z];
496  }
497  else if(keepLatestInRadius)
498  {
499  if(kIndices[z] < reference)
500  {
501  reference = kIndices[z];
502  }
503  }
504  else
505  {
506  if(kIndices[z] > reference)
507  {
508  reference = kIndices[z];
509  }
510  }
511  }
512  if(reference >= 0)
513  {
514  for(unsigned int z=0; z<kIndices.size(); ++z)
515  {
516  verticesDone.insert(std::make_pair(kIndices[z], reference));
517  }
518  polygon.vertices[j] = reference;
519  }
520  }
521  else
522  {
523  verticesDone.insert(std::make_pair(polygon.vertices[j], polygon.vertices[j]));
524  }
525  }
526  }
527  }
528  return outputPolygons;
529 }
530 
531 std::vector<pcl::Vertices> filterInvalidPolygons(const std::vector<pcl::Vertices> & polygons)
532 {
533  std::vector<pcl::Vertices> output(polygons.size());
534  int oi=0;
535  for(unsigned int i=0; i<polygons.size(); ++i)
536  {
537  bool valid = true;
538  for(unsigned int j=0; j<polygons[i].vertices.size(); ++j)
539  {
540  if(polygons[i].vertices[j] == polygons[i].vertices[(j+1)%polygons[i].vertices.size()])
541  {
542  valid = false;
543  break;
544  }
545  }
546  if(valid)
547  {
548  output[oi++] = polygons[i];
549  }
550  }
551  output.resize(oi);
552  return output;
553 }
554 
555 pcl::PolygonMesh::Ptr createMesh(
556  const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloudWithNormals,
557  float gp3SearchRadius,
558  float gp3Mu,
559  int gp3MaximumNearestNeighbors,
560  float gp3MaximumSurfaceAngle,
561  float gp3MinimumAngle,
562  float gp3MaximumAngle,
563  bool gp3NormalConsistency)
564 {
565  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloudWithNormalsNoNaN = removeNaNNormalsFromPointCloud(cloudWithNormals);
566 
567  // Create search tree*
568  pcl::search::KdTree<pcl::PointXYZRGBNormal>::Ptr tree2 (new pcl::search::KdTree<pcl::PointXYZRGBNormal>);
569  tree2->setInputCloud (cloudWithNormalsNoNaN);
570 
571  // Initialize objects
572  pcl::GreedyProjectionTriangulation<pcl::PointXYZRGBNormal> gp3;
573  pcl::PolygonMesh::Ptr mesh(new pcl::PolygonMesh);
574 
575  // Set the maximum distance between connected points (maximum edge length)
576  gp3.setSearchRadius (gp3SearchRadius);
577 
578  // Set typical values for the parameters
579  gp3.setMu (gp3Mu);
580  gp3.setMaximumNearestNeighbors (gp3MaximumNearestNeighbors);
581  gp3.setMaximumSurfaceAngle(gp3MaximumSurfaceAngle); // 45 degrees
582  gp3.setMinimumAngle(gp3MinimumAngle); // 10 degrees
583  gp3.setMaximumAngle(gp3MaximumAngle); // 120 degrees
584  gp3.setNormalConsistency(gp3NormalConsistency);
585  gp3.setConsistentVertexOrdering(gp3NormalConsistency);
586 
587  // Get result
588  gp3.setInputCloud (cloudWithNormalsNoNaN);
589  gp3.setSearchMethod (tree2);
590  gp3.reconstruct (*mesh);
591 
592  //UASSERT(mesh->cloud.data.size()/mesh->cloud.point_step == cloudWithNormalsNoNaN->size());
593  //mesh->polygons = normalizePolygonsSide(*cloudWithNormalsNoNaN, mesh->polygons);
594 
595  return mesh;
596 }
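// Usage sketch (parameter values are illustrative; the comments above list the
// typical 45/10/120 degree angle settings):
//
//   pcl::PolygonMesh::Ptr mesh = util3d::createMesh(
//       cloudWithNormals,
//       0.025f,       // gp3SearchRadius (max edge length, meters)
//       2.5f,         // gp3Mu
//       100,          // gp3MaximumNearestNeighbors
//       M_PI/4,       // gp3MaximumSurfaceAngle (45 degrees)
//       M_PI/18,      // gp3MinimumAngle (10 degrees)
//       2*M_PI/3,     // gp3MaximumAngle (120 degrees)
//       false);       // gp3NormalConsistency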
597 
598 pcl::texture_mapping::CameraVector createTextureCameras(
599  const std::map<int, Transform> & poses,
600  const std::map<int, std::vector<CameraModel> > & cameraModels,
601  const std::map<int, cv::Mat> & cameraDepths,
602  const std::vector<float> & roiRatios)
603 {
604  UASSERT(roiRatios.empty() || roiRatios.size() == 4);
605  pcl::texture_mapping::CameraVector cameras;
606 
607  for(std::map<int, Transform>::const_iterator poseIter=poses.begin(); poseIter!=poses.end(); ++poseIter)
608  {
609  std::map<int, std::vector<CameraModel> >::const_iterator modelIter=cameraModels.find(poseIter->first);
610 
611  if(modelIter!=cameraModels.end())
612  {
613  std::map<int, cv::Mat>::const_iterator depthIter = cameraDepths.find(poseIter->first);
614 
615  // for each sub camera
616  for(unsigned int i=0; i<modelIter->second.size(); ++i)
617  {
617  {
618  pcl::texture_mapping::Camera cam;
619  // should be in camera frame
620  UASSERT(!modelIter->second[i].localTransform().isNull() && !poseIter->second.isNull());
621  Transform t = poseIter->second*modelIter->second[i].localTransform();
622 
623  cam.pose = t.toEigen3f();
624 
625  if(modelIter->second[i].imageHeight() <=0 || modelIter->second[i].imageWidth() <=0)
626  {
627  UERROR("Should have camera models with width/height set to create texture cameras!");
629  }
630 
631  UASSERT(modelIter->second[i].fx()>0 && modelIter->second[i].imageHeight()>0 && modelIter->second[i].imageWidth()>0);
632  cam.focal_length_w=modelIter->second[i].fx();
633  cam.focal_length_h=modelIter->second[i].fy();
634  cam.center_w=modelIter->second[i].cx();
635  cam.center_h=modelIter->second[i].cy();
636  cam.height=modelIter->second[i].imageHeight();
637  cam.width=modelIter->second[i].imageWidth();
638  if(modelIter->second.size() == 1)
639  {
640  cam.texture_file = uFormat("%d", poseIter->first); // camera index
641  }
642  else
643  {
644  cam.texture_file = uFormat("%d_%d", poseIter->first, (int)i); // camera index, sub camera model index
645  }
646  if(!roiRatios.empty())
647  {
648  cam.roi.resize(4);
649  cam.roi[0] = cam.width * roiRatios[0]; // left -> x
650  cam.roi[1] = cam.height * roiRatios[2]; // top -> y
651  cam.roi[2] = cam.width * (1.0 - roiRatios[1]) - cam.roi[0]; // right -> width
652  cam.roi[3] = cam.height * (1.0 - roiRatios[3]) - cam.roi[1]; // bottom -> height
653  }
654 
655  if(depthIter != cameraDepths.end() && !depthIter->second.empty())
656  {
657  UASSERT(depthIter->second.type() == CV_32FC1 || depthIter->second.type() == CV_16UC1);
658  UASSERT(depthIter->second.cols % modelIter->second.size() == 0);
659  int subWidth = depthIter->second.cols/(modelIter->second.size());
660  cam.depth = cv::Mat(depthIter->second, cv::Range(0, depthIter->second.rows), cv::Range(subWidth*i, subWidth*(i+1)));
661  }
662 
663  UDEBUG("%f", cam.focal_length);
664  UDEBUG("%f", cam.height);
665  UDEBUG("%f", cam.width);
666  UDEBUG("cam.pose=%s", t.prettyPrint().c_str());
667 
668  cameras.push_back(cam);
669  }
670  }
671  }
672  return cameras;
673 }
674 
675 pcl::TextureMesh::Ptr createTextureMesh(
676  const pcl::PolygonMesh::Ptr & mesh,
677  const std::map<int, Transform> & poses,
678  const std::map<int, CameraModel> & cameraModels,
679  const std::map<int, cv::Mat> & cameraDepths,
680  float maxDistance,
681  float maxDepthError,
682  float maxAngle,
683  int minClusterSize,
684  const std::vector<float> & roiRatios,
685  const ProgressState * state,
686  std::vector<std::map<int, pcl::PointXY> > * vertexToPixels,
687  bool distanceToCamPolicy)
688 {
689  std::map<int, std::vector<CameraModel> > cameraSubModels;
690  for(std::map<int, CameraModel>::const_iterator iter=cameraModels.begin(); iter!=cameraModels.end(); ++iter)
691  {
692  std::vector<CameraModel> models;
693  models.push_back(iter->second);
694  cameraSubModels.insert(std::make_pair(iter->first, models));
695  }
696 
697  return createTextureMesh(
698  mesh,
699  poses,
700  cameraSubModels,
701  cameraDepths,
702  maxDistance,
703  maxDepthError,
704  maxAngle,
705  minClusterSize,
706  roiRatios,
707  state,
708  vertexToPixels,
709  distanceToCamPolicy);
710 }
711 
712 pcl::TextureMesh::Ptr createTextureMesh(
713  const pcl::PolygonMesh::Ptr & mesh,
714  const std::map<int, Transform> & poses,
715  const std::map<int, std::vector<CameraModel> > & cameraModels,
716  const std::map<int, cv::Mat> & cameraDepths,
717  float maxDistance,
718  float maxDepthError,
719  float maxAngle,
720  int minClusterSize,
721  const std::vector<float> & roiRatios,
722  const ProgressState * state,
723  std::vector<std::map<int, pcl::PointXY> > * vertexToPixels,
724  bool distanceToCamPolicy)
725 {
726  UASSERT(mesh->polygons.size());
727  pcl::TextureMesh::Ptr textureMesh(new pcl::TextureMesh);
728  textureMesh->cloud = mesh->cloud;
729  textureMesh->tex_polygons.push_back(mesh->polygons);
730 
731  // Original from pcl/gpu/kinfu_large_scale/tools/standalone_texture_mapping.cpp:
732  // Author: Raphael Favier, Technical University Eindhoven, (r.mysurname <aT> tue.nl)
733 
734  // Create the texturemesh object that will contain our UV-mapped mesh
735 
736  // create cameras
737  pcl::texture_mapping::CameraVector cameras = createTextureCameras(
738  poses,
739  cameraModels,
740  cameraDepths,
741  roiRatios);
742 
743  // Create materials for each texture (and one extra for occluded faces)
744  textureMesh->tex_materials.resize (cameras.size () + 1);
745  for(unsigned int i = 0 ; i <= cameras.size() ; ++i)
746  {
747  pcl::TexMaterial mesh_material;
748  mesh_material.tex_Ka.r = 0.2f;
749  mesh_material.tex_Ka.g = 0.2f;
750  mesh_material.tex_Ka.b = 0.2f;
751 
752  mesh_material.tex_Kd.r = 0.8f;
753  mesh_material.tex_Kd.g = 0.8f;
754  mesh_material.tex_Kd.b = 0.8f;
755 
756  mesh_material.tex_Ks.r = 1.0f;
757  mesh_material.tex_Ks.g = 1.0f;
758  mesh_material.tex_Ks.b = 1.0f;
759 
760  mesh_material.tex_d = 1.0f;
761  mesh_material.tex_Ns = 75.0f;
762  mesh_material.tex_illum = 2;
763 
764  std::stringstream tex_name;
765  tex_name << "material_" << i;
766  tex_name >> mesh_material.tex_name;
767 
768  if(i < cameras.size ())
769  {
770  mesh_material.tex_file = cameras[i].texture_file;
771  }
772  else
773  {
774  mesh_material.tex_file = "occluded";
775  }
776 
777  textureMesh->tex_materials[i] = mesh_material;
778  }
779 
780  // Texture by projection
781  pcl::TextureMapping<pcl::PointXYZ> tm; // TextureMapping object that will perform the sort
782  tm.setMaxDistance(maxDistance);
783  tm.setMaxAngle(maxAngle);
784  if(maxDepthError > 0.0f)
785  {
786  tm.setMaxDepthError(maxDepthError);
787  }
788  tm.setMinClusterSize(minClusterSize);
789  if(tm.textureMeshwithMultipleCameras2(*textureMesh, cameras, state, vertexToPixels, distanceToCamPolicy))
790  {
791  // compute normals for the mesh if not already here
792  bool hasNormals = false;
793  bool hasColors = false;
794  for(unsigned int i=0; i<textureMesh->cloud.fields.size(); ++i)
795  {
796  if(textureMesh->cloud.fields[i].name.compare("normal_x") == 0)
797  {
798  hasNormals = true;
799  }
800  else if(textureMesh->cloud.fields[i].name.compare("rgb") == 0)
801  {
802  hasColors = true;
803  }
804  }
805  if(!hasNormals)
806  {
807  // use polygons
808  if(hasColors)
809  {
810  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZRGBNormal>);
811  pcl::fromPCLPointCloud2(mesh->cloud, *cloud);
812 
813  for(unsigned int i=0; i<mesh->polygons.size(); ++i)
814  {
815  pcl::Vertices & v = mesh->polygons[i];
816  UASSERT(v.vertices.size()>2);
817  Eigen::Vector3f v0(
818  cloud->at(v.vertices[1]).x - cloud->at(v.vertices[0]).x,
819  cloud->at(v.vertices[1]).y - cloud->at(v.vertices[0]).y,
820  cloud->at(v.vertices[1]).z - cloud->at(v.vertices[0]).z);
821  int last = v.vertices.size()-1;
822  Eigen::Vector3f v1(
823  cloud->at(v.vertices[last]).x - cloud->at(v.vertices[0]).x,
824  cloud->at(v.vertices[last]).y - cloud->at(v.vertices[0]).y,
825  cloud->at(v.vertices[last]).z - cloud->at(v.vertices[0]).z);
826  Eigen::Vector3f normal = v0.cross(v1);
827  normal.normalize();
828  // flat normal (per face)
829  for(unsigned int j=0; j<v.vertices.size(); ++j)
830  {
831  cloud->at(v.vertices[j]).normal_x = normal[0];
832  cloud->at(v.vertices[j]).normal_y = normal[1];
833  cloud->at(v.vertices[j]).normal_z = normal[2];
834  }
835  }
836  pcl::toPCLPointCloud2 (*cloud, textureMesh->cloud);
837  }
838  else
839  {
840  pcl::PointCloud<pcl::PointNormal>::Ptr cloud (new pcl::PointCloud<pcl::PointNormal>);
841  pcl::fromPCLPointCloud2(mesh->cloud, *cloud);
842 
843  for(unsigned int i=0; i<mesh->polygons.size(); ++i)
844  {
845  pcl::Vertices & v = mesh->polygons[i];
846  UASSERT(v.vertices.size()>2);
847  Eigen::Vector3f v0(
848  cloud->at(v.vertices[1]).x - cloud->at(v.vertices[0]).x,
849  cloud->at(v.vertices[1]).y - cloud->at(v.vertices[0]).y,
850  cloud->at(v.vertices[1]).z - cloud->at(v.vertices[0]).z);
851  int last = v.vertices.size()-1;
852  Eigen::Vector3f v1(
853  cloud->at(v.vertices[last]).x - cloud->at(v.vertices[0]).x,
854  cloud->at(v.vertices[last]).y - cloud->at(v.vertices[0]).y,
855  cloud->at(v.vertices[last]).z - cloud->at(v.vertices[0]).z);
856  Eigen::Vector3f normal = v0.cross(v1);
857  normal.normalize();
858  // flat normal (per face)
859  for(unsigned int j=0; j<v.vertices.size(); ++j)
860  {
861  cloud->at(v.vertices[j]).normal_x = normal[0];
862  cloud->at(v.vertices[j]).normal_y = normal[1];
863  cloud->at(v.vertices[j]).normal_z = normal[2];
864  }
865  }
866  pcl::toPCLPointCloud2 (*cloud, textureMesh->cloud);
867  }
868  }
869  }
870  return textureMesh;
871 }
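// Usage sketch (parameter values are illustrative; a maxDepthError <= 0 simply
// skips the setMaxDepthError() call above):
//
//   std::vector<std::map<int, pcl::PointXY> > vertexToPixels;
//   pcl::TextureMesh::Ptr texturedMesh = util3d::createTextureMesh(
//       mesh, poses, cameraModels, cameraDepths,
//       4.0f,                    // maxDistance (m)
//       0.0f,                    // maxDepthError (disabled)
//       0.0f,                    // maxAngle
//       50,                      // minClusterSize
//       std::vector<float>(),    // roiRatios
//       0,                       // state
//       &vertexToPixels,
//       false);                  // distanceToCamPolicy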
872 
873 void cleanTextureMesh(
874  pcl::TextureMesh & textureMesh,
875  int minClusterSize)
876 {
877  UDEBUG("minClusterSize=%d", minClusterSize);
878  // Remove occluded polygons (polygons with no texture)
879  if(textureMesh.tex_coordinates.size())
880  {
881  // assume last texture is the occluded texture
882  textureMesh.tex_coordinates.pop_back();
883  textureMesh.tex_polygons.pop_back();
884  textureMesh.tex_materials.pop_back();
885 
886  if(minClusterSize!=0)
887  {
888  // concatenate all polygons
889  unsigned int totalSize = 0;
890  for(unsigned int t=0; t<textureMesh.tex_polygons.size(); ++t)
891  {
892  totalSize+=textureMesh.tex_polygons[t].size();
893  }
894  std::vector<pcl::Vertices> allPolygons(totalSize);
895  int oi=0;
896  for(unsigned int t=0; t<textureMesh.tex_polygons.size(); ++t)
897  {
898  for(unsigned int i=0; i<textureMesh.tex_polygons[t].size(); ++i)
899  {
900  allPolygons[oi++] = textureMesh.tex_polygons[t][i];
901  }
902  }
903 
904  // filter polygons
905  std::vector<std::set<int> > neighbors;
906  std::vector<std::set<int> > vertexToPolygons;
907  util3d::createPolygonIndexes(allPolygons,
908  (int)textureMesh.cloud.data.size()/textureMesh.cloud.point_step,
909  neighbors,
910  vertexToPolygons);
911 
912  std::list<std::list<int> > clusters = util3d::clusterPolygons(
913  neighbors,
914  minClusterSize<0?0:minClusterSize);
915 
916  std::set<int> validPolygons;
917  if(minClusterSize < 0)
918  {
919  // only keep the biggest cluster
920  std::list<std::list<int> >::iterator biggestClusterIndex = clusters.end();
921  unsigned int biggestClusterSize = 0;
922  for(std::list<std::list<int> >::iterator iter=clusters.begin(); iter!=clusters.end(); ++iter)
923  {
924  if(iter->size() > biggestClusterSize)
925  {
926  biggestClusterIndex = iter;
927  biggestClusterSize = iter->size();
928  }
929  }
930  if(biggestClusterIndex != clusters.end())
931  {
932  for(std::list<int>::iterator jter=biggestClusterIndex->begin(); jter!=biggestClusterIndex->end(); ++jter)
933  {
934  validPolygons.insert(*jter);
935  }
936  }
937  }
938  else
939  {
940  for(std::list<std::list<int> >::iterator iter=clusters.begin(); iter!=clusters.end(); ++iter)
941  {
942  for(std::list<int>::iterator jter=iter->begin(); jter!=iter->end(); ++jter)
943  {
944  validPolygons.insert(*jter);
945  }
946  }
947  }
948 
949  if(validPolygons.size() == 0)
950  {
951  UWARN("All %d polygons filtered after polygon cluster filtering. Cluster minimum size is %d.",totalSize, minClusterSize);
952  }
953 
954  // for each texture
955  unsigned int allPolygonsIndex = 0;
956  for(unsigned int t=0; t<textureMesh.tex_polygons.size(); ++t)
957  {
958  std::vector<pcl::Vertices> filteredPolygons(textureMesh.tex_polygons[t].size());
959 #if PCL_VERSION_COMPARE(>=, 1, 8, 0)
960  std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > filteredCoordinates(textureMesh.tex_coordinates[t].size());
961 #else
962  std::vector<Eigen::Vector2f> filteredCoordinates(textureMesh.tex_coordinates[t].size());
963 #endif
964 
965  if(textureMesh.tex_polygons[t].size())
966  {
967  UASSERT_MSG(allPolygonsIndex < allPolygons.size(), uFormat("%d vs %d", (int)allPolygonsIndex, (int)allPolygons.size()).c_str());
968 
969  // make index polygon to coordinate
970  std::vector<unsigned int> polygonToCoord(textureMesh.tex_polygons[t].size());
971  unsigned int totalCoord = 0;
972  for(unsigned int i=0; i<textureMesh.tex_polygons[t].size(); ++i)
973  {
974  polygonToCoord[i] = totalCoord;
975  totalCoord+=textureMesh.tex_polygons[t][i].vertices.size();
976  }
977  UASSERT_MSG(totalCoord == textureMesh.tex_coordinates[t].size(), uFormat("%d vs %d", totalCoord, (int)textureMesh.tex_coordinates[t].size()).c_str());
978 
979  int oi=0;
980  int ci=0;
981  for(unsigned int i=0; i<textureMesh.tex_polygons[t].size(); ++i)
982  {
983  if(validPolygons.find(allPolygonsIndex) != validPolygons.end())
984  {
985  filteredPolygons[oi] = textureMesh.tex_polygons[t].at(i);
986  for(unsigned int j=0; j<filteredPolygons[oi].vertices.size(); ++j)
987  {
988  UASSERT(polygonToCoord[i] < textureMesh.tex_coordinates[t].size());
989  filteredCoordinates[ci] = textureMesh.tex_coordinates[t][polygonToCoord[i]+j];
990  ++ci;
991  }
992  ++oi;
993  }
994  ++allPolygonsIndex;
995  }
996  filteredPolygons.resize(oi);
997  filteredCoordinates.resize(ci);
998  textureMesh.tex_polygons[t] = filteredPolygons;
999  textureMesh.tex_coordinates[t] = filteredCoordinates;
1000  }
1001  }
1002  }
1003  }
1004 }
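// Usage sketch: drop the extra "occluded" material and remove small isolated
// patches of polygons; a negative minClusterSize keeps only the largest cluster.
//
//   util3d::cleanTextureMesh(*textureMesh, 50);   // drop clusters of 50 polygons or fewer (example value)
//   util3d::cleanTextureMesh(*textureMesh, -1);   // keep only the biggest cluster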
1005 
1006 pcl::TextureMesh::Ptr concatenateTextureMeshes(const std::list<pcl::TextureMesh::Ptr> & meshes)
1007 {
1008  pcl::TextureMesh::Ptr output(new pcl::TextureMesh);
1009  std::map<std::string, int> addedMaterials; //<file, index>
1010  for(std::list<pcl::TextureMesh::Ptr>::const_iterator iter = meshes.begin(); iter!=meshes.end(); ++iter)
1011  {
1012  if((*iter)->cloud.point_step &&
1013  (*iter)->cloud.data.size()/(*iter)->cloud.point_step &&
1014  (*iter)->tex_polygons.size() &&
1015  (*iter)->tex_coordinates.size())
1016  {
1017  // append point cloud
1018  int polygonStep = output->cloud.height * output->cloud.width;
1019  pcl::PCLPointCloud2 tmp;
1020 #if PCL_VERSION_COMPARE(>=, 1, 10, 0)
1021  pcl::concatenate(output->cloud, iter->get()->cloud, tmp);
1022 #else
1023  pcl::concatenatePointCloud(output->cloud, iter->get()->cloud, tmp);
1024 #endif
1025  output->cloud = tmp;
1026 
1027  UASSERT((*iter)->tex_polygons.size() == (*iter)->tex_coordinates.size() &&
1028  (*iter)->tex_polygons.size() == (*iter)->tex_materials.size());
1029 
1030  int materialCount = (*iter)->tex_polygons.size();
1031  for(int i=0; i<materialCount; ++i)
1032  {
1033  std::map<std::string, int>::iterator jter = addedMaterials.find((*iter)->tex_materials[i].tex_file);
1034  int index;
1035  if(jter != addedMaterials.end())
1036  {
1037  index = jter->second;
1038  }
1039  else
1040  {
1041  addedMaterials.insert(std::make_pair((*iter)->tex_materials[i].tex_file, output->tex_materials.size()));
1042  index = output->tex_materials.size();
1043  output->tex_materials.push_back((*iter)->tex_materials[i]);
1044  output->tex_materials.back().tex_name = uFormat("material_%d", index);
1045  output->tex_polygons.resize(output->tex_polygons.size() + 1);
1046  output->tex_coordinates.resize(output->tex_coordinates.size() + 1);
1047  }
1048 
1049  // update and append polygon indices
1050  int oi = output->tex_polygons[index].size();
1051  output->tex_polygons[index].resize(output->tex_polygons[index].size() + (*iter)->tex_polygons[i].size());
1052  for(unsigned int j=0; j<(*iter)->tex_polygons[i].size(); ++j)
1053  {
1054  pcl::Vertices polygon = (*iter)->tex_polygons[i][j];
1055  for(unsigned int k=0; k<polygon.vertices.size(); ++k)
1056  {
1057  polygon.vertices[k] += polygonStep;
1058  }
1059  output->tex_polygons[index][oi+j] = polygon;
1060  }
1061 
1062  // append uv coordinates
1063  oi = output->tex_coordinates[index].size();
1064  output->tex_coordinates[index].resize(output->tex_coordinates[index].size() + (*iter)->tex_coordinates[i].size());
1065  for(unsigned int j=0; j<(*iter)->tex_coordinates[i].size(); ++j)
1066  {
1067  output->tex_coordinates[index][oi+j] = (*iter)->tex_coordinates[i][j];
1068  }
1069  }
1070  }
1071  }
1072  return output;
1073 }
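// Usage sketch: meshes sharing the same material tex_file are merged into a
// single material, and polygon indices are shifted by the accumulated cloud size.
//
//   std::list<pcl::TextureMesh::Ptr> meshes;
//   meshes.push_back(meshA);
//   meshes.push_back(meshB);
//   pcl::TextureMesh::Ptr merged = util3d::concatenateTextureMeshes(meshes);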
1074 
1075 int gcd(int a, int b) {
1076  return b == 0 ? a : gcd(b, a % b);
1077 }
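// Worked example, matching the 640x480 trace in the comments below:
// gcd(640, 480) = 160, so a = 640/160 = 4 and b = 480/160 = 3.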
1078 
1079 void concatenateTextureMaterials(pcl::TextureMesh & mesh, const cv::Size & imageSize, int textureSize, int maxTextures, float & scale, std::vector<bool> * materialsKept)
1080 {
1081  UASSERT(textureSize>0 && imageSize.width>0 && imageSize.height>0);
1082  if(maxTextures < 1)
1083  {
1084  maxTextures = 1;
1085  }
1086  int materials = 0;
1087  for(unsigned int i=0; i<mesh.tex_materials.size(); ++i)
1088  {
1089  if(mesh.tex_polygons.size())
1090  {
1091  ++materials;
1092  }
1093  }
1094  if(materials)
1095  {
1096  int w = imageSize.width; // 640
1097  int h = imageSize.height; // 480
1098  int g = gcd(w,h); // 160
1099  int a = w/g; // 4=640/160
1100  int b = h/g; // 3=480/160
1101  UDEBUG("w=%d h=%d g=%d a=%d b=%d", w, h, g, a, b);
1102  int colCount = 0;
1103  int rowCount = 0;
1104  float factor = 0.1f;
1105  float epsilon = 0.001f;
1106  scale = 1.0f;
1107  while((colCount*rowCount)*maxTextures < materials || (factor == 0.1f || scale > 1.0f))
1108  {
1109  // first run try scale = 1 (no scaling)
1110  if(factor!=0.1f)
1111  {
1112  scale = float(textureSize)/float(w*b*factor);
1113  }
1114  colCount = float(textureSize)/(scale*float(w));
1115  rowCount = float(textureSize)/(scale*float(h));
1116  factor+=epsilon; // search the maximum perfect fit
1117  }
1118  int outputTextures = (materials / (colCount*rowCount)) + (materials % (colCount*rowCount) > 0?1:0);
1119  UDEBUG("materials=%d col=%d row=%d output textures=%d factor=%f scale=%f", materials, colCount, rowCount, outputTextures, factor-epsilon, scale);
1120 
1121  UASSERT(mesh.tex_coordinates.size() == mesh.tex_materials.size() && mesh.tex_polygons.size() == mesh.tex_materials.size());
1122 
1123  // prepare size
1124  std::vector<int> totalPolygons(outputTextures, 0);
1125  std::vector<int> totalCoordinates(outputTextures, 0);
1126  int count = 0;
1127  for(unsigned int i=0; i<mesh.tex_materials.size(); ++i)
1128  {
1129  if(mesh.tex_polygons[i].size())
1130  {
1131  int indexMaterial = count / (colCount*rowCount);
1132  UASSERT(indexMaterial < outputTextures);
1133 
1134  totalPolygons[indexMaterial]+=mesh.tex_polygons[i].size();
1135  totalCoordinates[indexMaterial]+=mesh.tex_coordinates[i].size();
1136 
1137  ++count;
1138  }
1139  }
1140 
1141  pcl::TextureMesh outputMesh;
1142 
1143  int pi = 0;
1144  int ci = 0;
1145  int ti=0;
1146  float scaledHeight = float(int(scale*float(h)))/float(textureSize);
1147  float scaledWidth = float(int(scale*float(w)))/float(textureSize);
1148  float lowerBorderSize = 1.0f - scaledHeight*float(rowCount);
1149  UDEBUG("scaledWidth=%f scaledHeight=%f lowerBorderSize=%f", scaledWidth, scaledHeight, lowerBorderSize);
1150  if(materialsKept)
1151  {
1152  materialsKept->resize(mesh.tex_materials.size(), false);
1153  }
1154  for(unsigned int t=0; t<mesh.tex_materials.size(); ++t)
1155  {
1156  if(mesh.tex_polygons[t].size())
1157  {
1158  int indexMaterial = ti / (colCount*rowCount);
1159  UASSERT(indexMaterial < outputTextures);
1160  if((int)outputMesh.tex_polygons.size() <= indexMaterial)
1161  {
1162  std::vector<pcl::Vertices> newPolygons(totalPolygons[indexMaterial]);
1163 #if PCL_VERSION_COMPARE(>=, 1, 8, 0)
1164  std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > newCoordinates(totalCoordinates[indexMaterial]); // UV coordinates
1165 #else
1166  std::vector<Eigen::Vector2f> newCoordinates(totalCoordinates[indexMaterial]); // UV coordinates
1167 #endif
1168  outputMesh.tex_polygons.push_back(newPolygons);
1169  outputMesh.tex_coordinates.push_back(newCoordinates);
1170 
1171  pi=0;
1172  ci=0;
1173  }
1174 
1175  int row = (ti/colCount) % rowCount;
1176  int col = ti%colCount;
1177  float offsetU = scaledWidth * float(col);
1178  float offsetV = scaledHeight * float((rowCount - 1) - row) + lowerBorderSize;
1179  // Texture coords have lower-left origin
1180 
1181  for(unsigned int i=0; i<mesh.tex_polygons[t].size(); ++i)
1182  {
1183  UASSERT(pi < (int)outputMesh.tex_polygons[indexMaterial].size());
1184  outputMesh.tex_polygons[indexMaterial][pi++] = mesh.tex_polygons[t].at(i);
1185  }
1186 
1187  for(unsigned int i=0; i<mesh.tex_coordinates[t].size(); ++i)
1188  {
1189  const Eigen::Vector2f & v = mesh.tex_coordinates[t].at(i);
1190  if(v[0] >= 0 && v[1] >=0)
1191  {
1192  outputMesh.tex_coordinates[indexMaterial][ci][0] = v[0]*scaledWidth + offsetU;
1193  outputMesh.tex_coordinates[indexMaterial][ci][1] = v[1]*scaledHeight + offsetV;
1194  }
1195  else
1196  {
1197  outputMesh.tex_coordinates[indexMaterial][ci] = v;
1198  }
1199  ++ci;
1200  }
1201  ++ti;
1202  if(materialsKept)
1203  {
1204  materialsKept->at(t) = true;
1205  }
1206  }
1207  }
1208  pcl::TexMaterial m = mesh.tex_materials.front();
1209  mesh.tex_materials.clear();
1210  for(int i=0; i<outputTextures; ++i)
1211  {
1212  m.tex_file = "texture";
1213  m.tex_name = "material";
1214  if(outputTextures > 1)
1215  {
1216  m.tex_file += uNumber2Str(i);
1217  m.tex_name += uNumber2Str(i);
1218  }
1219 
1220  mesh.tex_materials.push_back(m);
1221  }
1222  mesh.tex_coordinates = outputMesh.tex_coordinates;
1223  mesh.tex_polygons = outputMesh.tex_polygons;
1224  }
1225 }
1226 
1227 std::vector<std::vector<RTABMAP_PCL_INDEX> > convertPolygonsFromPCL(const std::vector<pcl::Vertices> & polygons)
1228 {
1229  std::vector<std::vector<RTABMAP_PCL_INDEX> > polygonsOut(polygons.size());
1230  for(unsigned int p=0; p<polygons.size(); ++p)
1231  {
1232  polygonsOut[p] = polygons[p].vertices;
1233  }
1234  return polygonsOut;
1235 }
1236 std::vector<std::vector<std::vector<RTABMAP_PCL_INDEX> > > convertPolygonsFromPCL(const std::vector<std::vector<pcl::Vertices> > & tex_polygons)
1237 {
1238  std::vector<std::vector<std::vector<RTABMAP_PCL_INDEX> > > polygonsOut(tex_polygons.size());
1239  for(unsigned int t=0; t<tex_polygons.size(); ++t)
1240  {
1241  polygonsOut[t].resize(tex_polygons[t].size());
1242  for(unsigned int p=0; p<tex_polygons[t].size(); ++p)
1243  {
1244  polygonsOut[t][p] = tex_polygons[t][p].vertices;
1245  }
1246  }
1247  return polygonsOut;
1248 }
1249 std::vector<pcl::Vertices> convertPolygonsToPCL(const std::vector<std::vector<RTABMAP_PCL_INDEX> > & polygons)
1250 {
1251  std::vector<pcl::Vertices> polygonsOut(polygons.size());
1252  for(unsigned int p=0; p<polygons.size(); ++p)
1253  {
1254  polygonsOut[p].vertices = polygons[p];
1255  }
1256  return polygonsOut;
1257 }
1258 std::vector<std::vector<pcl::Vertices> > convertPolygonsToPCL(const std::vector<std::vector<std::vector<RTABMAP_PCL_INDEX> > > & tex_polygons)
1259 {
1260  std::vector<std::vector<pcl::Vertices> > polygonsOut(tex_polygons.size());
1261  for(unsigned int t=0; t<tex_polygons.size(); ++t)
1262  {
1263  polygonsOut[t].resize(tex_polygons[t].size());
1264  for(unsigned int p=0; p<tex_polygons[t].size(); ++p)
1265  {
1266  polygonsOut[t][p].vertices = tex_polygons[t][p];
1267  }
1268  }
1269  return polygonsOut;
1270 }
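// These converters only copy vertex indices between std::vector and
// pcl::Vertices; converting to PCL and back returns the original polygons.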
1271 
1272 pcl::TextureMesh::Ptr assembleTextureMesh(
1273  const cv::Mat & cloudMat,
1274  const std::vector<std::vector<std::vector<RTABMAP_PCL_INDEX> > > & polygons,
1275 #if PCL_VERSION_COMPARE(>=, 1, 8, 0)
1276  const std::vector<std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > > & texCoords,
1277 #else
1278  const std::vector<std::vector<Eigen::Vector2f> > & texCoords,
1279 #endif
1280  cv::Mat & textures,
1281  bool mergeTextures)
1282 {
1283  pcl::TextureMesh::Ptr textureMesh(new pcl::TextureMesh);
1284 
1285  if(cloudMat.channels() <= 3)
1286  {
1287  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = rtabmap::util3d::laserScanToPointCloud(LaserScan::backwardCompatibility(cloudMat));
1288  pcl::toPCLPointCloud2(*cloud, textureMesh->cloud);
1289  }
1290  else if(cloudMat.channels() == 4)
1291  {
1292  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudRGB(LaserScan::backwardCompatibility(cloudMat));
1293  pcl::toPCLPointCloud2(*cloud, textureMesh->cloud);
1294  }
1295  else if(cloudMat.channels() == 6)
1296  {
1297  pcl::PointCloud<pcl::PointNormal>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudNormal(LaserScan::backwardCompatibility(cloudMat));
1298  pcl::toPCLPointCloud2(*cloud, textureMesh->cloud);
1299  }
1300  else if(cloudMat.channels() == 7)
1301  {
1302  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudRGBNormal(LaserScan::backwardCompatibility(cloudMat));
1303  pcl::toPCLPointCloud2(*cloud, textureMesh->cloud);
1304  }
1305 
1306  if(textureMesh->cloud.data.size() && polygons.size())
1307  {
1308  textureMesh->tex_polygons.resize(polygons.size());
1309  for(unsigned int t=0; t<polygons.size(); ++t)
1310  {
1311  textureMesh->tex_polygons[t].resize(polygons[t].size());
1312  for(unsigned int p=0; p<polygons[t].size(); ++p)
1313  {
1314  textureMesh->tex_polygons[t][p].vertices = polygons[t][p];
1315  }
1316  }
1317 
1318  if(!texCoords.empty() && !textures.empty())
1319  {
1320  textureMesh->tex_coordinates = texCoords;
1321 
1322  textureMesh->tex_materials.resize (textureMesh->tex_coordinates.size());
1323  for(unsigned int i = 0 ; i < textureMesh->tex_coordinates.size() ; ++i)
1324  {
1325  pcl::TexMaterial mesh_material;
1326  mesh_material.tex_Ka.r = 0.2f;
1327  mesh_material.tex_Ka.g = 0.2f;
1328  mesh_material.tex_Ka.b = 0.2f;
1329 
1330  mesh_material.tex_Kd.r = 0.8f;
1331  mesh_material.tex_Kd.g = 0.8f;
1332  mesh_material.tex_Kd.b = 0.8f;
1333 
1334  mesh_material.tex_Ks.r = 1.0f;
1335  mesh_material.tex_Ks.g = 1.0f;
1336  mesh_material.tex_Ks.b = 1.0f;
1337 
1338  mesh_material.tex_d = 1.0f;
1339  mesh_material.tex_Ns = 75.0f;
1340  mesh_material.tex_illum = 2;
1341 
1342  std::stringstream tex_name;
1343  tex_name << "material_" << i;
1344  tex_name >> mesh_material.tex_name;
1345 
1346  mesh_material.tex_file = uFormat("%d", i);
1347 
1348  textureMesh->tex_materials[i] = mesh_material;
1349  }
1350 
1351  if(mergeTextures && textures.cols/textures.rows > 1)
1352  {
1353  UASSERT(textures.cols % textures.rows == 0 && textures.cols/textures.rows == (int)textureMesh->tex_coordinates.size());
1354  std::vector<bool> materialsKept;
1355  float scale = 0.0f;
1356  cv::Size imageSize(textures.rows, textures.rows);
1357  int imageType = textures.type();
1358  rtabmap::util3d::concatenateTextureMaterials(*textureMesh, imageSize, textures.rows, 1, scale, &materialsKept);
1359  if(scale && textureMesh->tex_materials.size() == 1)
1360  {
1361  int cols = float(textures.rows)/(scale*imageSize.width);
1362  int rows = float(textures.rows)/(scale*imageSize.height);
1363 
1364  cv::Mat mergedTextures = cv::Mat(textures.rows, textures.rows, imageType, cv::Scalar::all(255));
1365 
1366  // make a blank texture
1367  cv::Size resizedImageSize(int(imageSize.width*scale), int(imageSize.height*scale));
1368  int oi=0;
1369  for(int i=0; i<(int)materialsKept.size(); ++i)
1370  {
1371  if(materialsKept.at(i))
1372  {
1373  int u = oi%cols * resizedImageSize.width;
1374  int v = ((oi/cols) % rows ) * resizedImageSize.height;
1375  UASSERT(u < textures.rows-resizedImageSize.width);
1376  UASSERT(v < textures.rows-resizedImageSize.height);
1377 
1378  cv::Mat resizedImage;
1379  cv::resize(textures(cv::Range::all(), cv::Range(i*textures.rows, (i+1)*textures.rows)), resizedImage, resizedImageSize, 0.0f, 0.0f, cv::INTER_AREA);
1380 
1381  UASSERT(resizedImage.type() == mergedTextures.type());
1382  resizedImage.copyTo(mergedTextures(cv::Rect(u, v, resizedImage.cols, resizedImage.rows)));
1383 
1384  ++oi;
1385  }
1386  }
1387  textures = mergedTextures;
1388  }
1389  }
1390  }
1391  }
1392  return textureMesh;
1393 }
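// Usage sketch: rebuild a pcl::TextureMesh from the flattened cv::Mat cloud,
// per-material polygons and UV coordinates; with mergeTextures=true and a
// horizontally concatenated texture image, the materials are packed into one
// square texture.
//
//   pcl::TextureMesh::Ptr mesh = util3d::assembleTextureMesh(
//       cloudMat, polygons, texCoords, textures, true /*mergeTextures*/);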
1394 
1395 pcl::PolygonMesh::Ptr assemblePolygonMesh(
1396  const cv::Mat & cloudMat,
1397  const std::vector<std::vector<RTABMAP_PCL_INDEX> > & polygons)
1398 {
1399  pcl::PolygonMesh::Ptr polygonMesh(new pcl::PolygonMesh);
1400 
1401  if(cloudMat.channels() <= 3)
1402  {
1403  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = rtabmap::util3d::laserScanToPointCloud(LaserScan::backwardCompatibility(cloudMat));
1404  pcl::toPCLPointCloud2(*cloud, polygonMesh->cloud);
1405  }
1406  else if(cloudMat.channels() == 4)
1407  {
1408  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudRGB(LaserScan::backwardCompatibility(cloudMat));
1409  pcl::toPCLPointCloud2(*cloud, polygonMesh->cloud);
1410  }
1411  else if(cloudMat.channels() == 6)
1412  {
1413  pcl::PointCloud<pcl::PointNormal>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudNormal(LaserScan::backwardCompatibility(cloudMat));
1414  pcl::toPCLPointCloud2(*cloud, polygonMesh->cloud);
1415  }
1416  else if(cloudMat.channels() == 7)
1417  {
1418  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudRGBNormal(LaserScan::backwardCompatibility(cloudMat));
1419  pcl::toPCLPointCloud2(*cloud, polygonMesh->cloud);
1420  }
1421 
1422  if(polygonMesh->cloud.data.size() && polygons.size())
1423  {
1424  polygonMesh->polygons.resize(polygons.size());
1425  for(unsigned int p=0; p<polygons.size(); ++p)
1426  {
1427  polygonMesh->polygons[p].vertices = polygons[p];
1428  }
1429  }
1430  return polygonMesh;
1431 }
1432 
1433 double sqr(uchar v)
1434 {
1435  return double(v)*double(v);
1436 }
1437 
1438 cv::Mat mergeTextures(
1439  pcl::TextureMesh & mesh,
1440  const std::map<int, cv::Mat> & images,
1441  const std::map<int, CameraModel> & calibrations,
1442  const Memory * memory,
1443  const DBDriver * dbDriver,
1444  int textureSize,
1445  int textureCount,
1446  const std::vector<std::map<int, pcl::PointXY> > & vertexToPixels,
1447  bool gainCompensation,
1448  float gainBeta,
1449  bool gainRGB,
1450  bool blending,
1451  int blendingDecimation,
1452  int brightnessContrastRatioLow,
1453  int brightnessContrastRatioHigh,
1454  bool exposureFusion,
1455  const ProgressState * state,
1456  unsigned char blankValue,
1457  bool clearVertexColorUnderTexture,
1458  std::map<int, std::map<int, cv::Vec4d> > * gains,
1459  std::map<int, std::map<int, cv::Mat> > * blendingGains,
1460  std::pair<float, float> * contrastValues)
1461 {
1462  std::map<int, std::vector<CameraModel> > calibVectors;
1463  for(std::map<int, CameraModel>::const_iterator iter=calibrations.begin(); iter!=calibrations.end(); ++iter)
1464  {
1465  std::vector<CameraModel> m;
1466  m.push_back(iter->second);
1467  calibVectors.insert(std::make_pair(iter->first, m));
1468  }
1469  return mergeTextures(mesh,
1470  images,
1471  calibVectors,
1472  memory,
1473  dbDriver,
1474  textureSize,
1475  textureCount,
1476  vertexToPixels,
1477  gainCompensation,
1478  gainBeta,
1479  gainRGB,
1480  blending,
1481  blendingDecimation,
1482  brightnessContrastRatioLow,
1483  brightnessContrastRatioHigh,
1484  exposureFusion,
1485  state,
1486  blankValue,
1487  clearVertexColorUnderTexture,
1488  gains,
1489  blendingGains,
1490  contrastValues);
1491 }
1492 cv::Mat mergeTextures(
1493  pcl::TextureMesh & mesh,
1494  const std::map<int, cv::Mat> & images,
1495  const std::map<int, std::vector<CameraModel> > & calibrations,
1496  const Memory * memory,
1497  const DBDriver * dbDriver,
1498  int textureSize,
1499  int textureCount,
1500  const std::vector<std::map<int, pcl::PointXY> > & vertexToPixels,
1501  bool gainCompensation,
1502  float gainBeta,
1503  bool gainRGB,
1504  bool blending,
1505  int blendingDecimation,
1506  int brightnessContrastRatioLow,
1507  int brightnessContrastRatioHigh,
1508  bool exposureFusion,
1509  const ProgressState * state,
1510  unsigned char blankValue,
1511  bool clearVertexColorUnderTexture,
1512  std::map<int, std::map<int, cv::Vec4d> > * gainsOut,
1513  std::map<int, std::map<int, cv::Mat> > * blendingGainsOut,
1514  std::pair<float, float> * contrastValuesOut)
1515 {
1516  //get texture size, if disabled use default 1024
1517  UASSERT(textureSize%256 == 0);
1518  UDEBUG("textureSize = %d", textureSize);
1519  cv::Mat globalTextures;
1520  if(!mesh.tex_materials.empty())
1521  {
1522  std::vector<std::pair<int, int> > textures(mesh.tex_materials.size(), std::pair<int, int>(-1,0));
1523  cv::Size imageSize;
1524  const int imageType=CV_8UC3;
1525 
1526  UDEBUG("");
1527  for(unsigned int i=0; i<mesh.tex_materials.size(); ++i)
1528  {
1529  std::list<std::string> texFileSplit = uSplit(mesh.tex_materials[i].tex_file, '_');
1530  if(!mesh.tex_materials[i].tex_file.empty() &&
1531  mesh.tex_polygons[i].size() &&
1532  uIsInteger(texFileSplit.front(), false))
1533  {
1534  textures[i].first = uStr2Int(texFileSplit.front());
1535  if(texFileSplit.size() == 2 &&
1536  uIsInteger(texFileSplit.back(), false) )
1537  {
1538  textures[i].second = uStr2Int(texFileSplit.back());
1539  }
1540 
1541  int textureId = textures[i].first;
1542  if(imageSize.width == 0 || imageSize.height == 0)
1543  {
1544  if(images.find(textureId) != images.end() &&
1545  !images.find(textureId)->second.empty() &&
1546  calibrations.find(textureId) != calibrations.end())
1547  {
1548  const std::vector<CameraModel> & models = calibrations.find(textureId)->second;
1549  UASSERT(models.size()>=1);
1550  if( models[0].imageHeight()>0 &&
1551  models[0].imageWidth()>0)
1552  {
1553  imageSize = models[0].imageSize();
1554  }
1555  else if(images.find(textureId)!=images.end())
1556  {
1557  // backward compatibility for image size not set in CameraModel
1558  cv::Mat image = images.find(textureId)->second;
1559  if(image.rows == 1 && image.type() == CV_8UC1)
1560  {
1561  image = uncompressImage(image);
1562  }
1563  UASSERT(!image.empty());
1564  imageSize = image.size();
1565  if(models.size()>1)
1566  {
1567  imageSize.width/=models.size();
1568  }
1569  }
1570  }
1571  else if(memory)
1572  {
1573  SensorData data = memory->getNodeData(textureId, true, false, false, false);
1574  const std::vector<CameraModel> & models = data.cameraModels();
1575  const std::vector<StereoCameraModel> & stereoModels = data.stereoCameraModels();
1576  if(models.size()>=1 &&
1577  models[0].imageHeight()>0 &&
1578  models[0].imageWidth()>0)
1579  {
1580  imageSize = models[0].imageSize();
1581  }
1582  else if(stereoModels.size()>=1 &&
1583  stereoModels[0].left().imageHeight() > 0 &&
1584  stereoModels[0].left().imageWidth() > 0)
1585  {
1586  imageSize = stereoModels[0].left().imageSize();
1587  }
1588  else // backward compatibility for image size not set in CameraModel
1589  {
1590  cv::Mat image;
1591  data.uncompressDataConst(&image, 0);
1592  UASSERT(!image.empty());
1593  imageSize = image.size();
1594  if(data.cameraModels().size()>1)
1595  {
1596  imageSize.width/=data.cameraModels().size();
1597  }
1598  }
1599  }
1600  else if(dbDriver)
1601  {
1602  std::vector<CameraModel> models;
1603  std::vector<StereoCameraModel> stereoModels;
1604  dbDriver->getCalibration(textureId, models, stereoModels);
1605  if(models.size()>=1 &&
1606  models[0].imageHeight()>0 &&
1607  models[0].imageWidth()>0)
1608  {
1609  imageSize = models[0].imageSize();
1610  }
1611  else if(stereoModels.size()>=1 &&
1612  stereoModels[0].left().imageHeight() > 0 &&
1613  stereoModels[0].left().imageWidth() > 0)
1614  {
1615  imageSize = stereoModels[0].left().imageSize();
1616  }
1617  else // backward compatibility for image size not set in CameraModel
1618  {
1619  SensorData data;
1620  dbDriver->getNodeData(textureId, data, true, false, false, false);
1621  cv::Mat image;
1622  data.uncompressDataConst(&image, 0);
1623  UASSERT(!image.empty());
1624  imageSize = image.size();
1625  if(data.cameraModels().size()>1)
1626  {
1627  imageSize.width/=data.cameraModels().size();
1628  }
1629  }
1630  }
1631  }
1632  }
1633  else if(mesh.tex_polygons[i].size() && mesh.tex_materials[i].tex_file.compare("occluded")!=0)
1634  {
1635  UWARN("Failed parsing texture file name: %s", mesh.tex_materials[i].tex_file.c_str());
1636  }
1637  }
1638  UDEBUG("textures=%d imageSize=%dx%d", (int)textures.size(), imageSize.height, imageSize.width);
1639  if(textures.size() && imageSize.height>0 && imageSize.width>0)
1640  {
1641  float scale = 0.0f;
1642  UDEBUG("");
1643  std::vector<bool> materialsKept;
1644  util3d::concatenateTextureMaterials(mesh, imageSize, textureSize, textureCount, scale, &materialsKept);
1645  if(scale && mesh.tex_materials.size())
1646  {
1647  int materials = (int)mesh.tex_materials.size();
1648  int cols = float(textureSize)/(scale*imageSize.width);
1649  int rows = float(textureSize)/(scale*imageSize.height);
1650 
1651  globalTextures = cv::Mat(textureSize, materials*textureSize, imageType, cv::Scalar::all(blankValue));
1652  cv::Mat globalTextureMasks = cv::Mat(textureSize, materials*textureSize, CV_8UC1, cv::Scalar::all(0));
1653 
1654  // used for multi camera texturing, to avoid reloading same texture for sub cameras
1655  cv::Mat previousImage;
1656  int previousTextureId = 0;
1657  std::vector<CameraModel> previousCameraModels;
1658 
1659  // make a blank texture
1660  cv::Mat emptyImage(int(imageSize.height*scale), int(imageSize.width*scale), imageType, cv::Scalar::all(blankValue));
1661  cv::Mat emptyImageMask(int(imageSize.height*scale), int(imageSize.width*scale), CV_8UC1, cv::Scalar::all(255));
1662  int oi=0;
1663  std::vector<cv::Point2i> imageOrigin(textures.size());
1664  std::vector<int> newCamIndex(textures.size(), -1);
1665  for(int t=0; t<(int)textures.size(); ++t)
1666  {
1667  if(materialsKept.at(t))
1668  {
1669  int indexMaterial = oi / (cols*rows);
1670  UASSERT(indexMaterial < materials);
1671 
1672  newCamIndex[t] = oi;
1673  int u = oi%cols * emptyImage.cols;
1674  int v = ((oi/cols) % rows ) * emptyImage.rows;
1675  UASSERT_MSG(u < textureSize-emptyImage.cols, uFormat("u=%d textureSize=%d emptyImage.cols=%d", u, textureSize, emptyImage.cols).c_str());
1676  UASSERT_MSG(v < textureSize-emptyImage.rows, uFormat("v=%d textureSize=%d emptyImage.rows=%d", v, textureSize, emptyImage.rows).c_str());
1677  imageOrigin[t].x = u;
1678  imageOrigin[t].y = v;
1679  if(textures[t].first>=0)
1680  {
1681  cv::Mat image;
1682  std::vector<CameraModel> models;
1683 
1684  if(textures[t].first == previousTextureId)
1685  {
1686  image = previousImage;
1687  models = previousCameraModels;
1688  }
1689  else
1690  {
1691  if(images.find(textures[t].first) != images.end() &&
1692  !images.find(textures[t].first)->second.empty() &&
1693  calibrations.find(textures[t].first) != calibrations.end())
1694  {
1695  image = images.find(textures[t].first)->second;
1696  if(image.rows == 1 && image.type() == CV_8UC1)
1697  {
1698  image = uncompressImage(image);
1699  }
1700  models = calibrations.find(textures[t].first)->second;
1701  }
1702  else if(memory)
1703  {
1704  SensorData data = memory->getNodeData(textures[t].first, true, false, false, false);
1705  models = data.cameraModels();
1706  if(models.empty() && !data.stereoCameraModels().empty())
1707  {
1708  for(size_t i=0; i<data.stereoCameraModels().size(); ++i)
1709  {
1710  models.push_back(data.stereoCameraModels()[i].left());
1711  }
1712  }
1713  data.uncompressDataConst(&image, 0);
1714  }
1715  else if(dbDriver)
1716  {
1717  SensorData data;
1718  dbDriver->getNodeData(textures[t].first, data, true, false, false, false);
1719  data.uncompressDataConst(&image, 0);
1720  std::vector<StereoCameraModel> stereoModels;
1721  dbDriver->getCalibration(textures[t].first, models, stereoModels);
1722  if(models.empty() && !stereoModels.empty())
1723  {
1724  for(size_t i=0; i<stereoModels.size(); ++i)
1725  {
1726  models.push_back(stereoModels[i].left());
1727  }
1728  }
1729  }
1730 
1731  previousImage = image;
1732  previousCameraModels = models;
1733  previousTextureId = textures[t].first;
1734  }
1735 
1736  UASSERT(!image.empty());
1737 
1738  if(textures[t].second>=0)
1739  {
1740  UASSERT(textures[t].second < (int)models.size());
1741  int width = image.cols/models.size();
1742  image = image.colRange(width*textures[t].second, width*(textures[t].second+1));
1743  }
1744 
1745  cv::Mat resizedImage;
1746  cv::resize(image, resizedImage, emptyImage.size(), 0.0f, 0.0f, cv::INTER_AREA);
1747  UASSERT(resizedImage.type() == CV_8UC1 || resizedImage.type() == CV_8UC3);
1748  if(resizedImage.type() == CV_8UC1)
1749  {
1750  cv::Mat resizedImageColor;
1751  cv::cvtColor(resizedImage, resizedImageColor, CV_GRAY2BGR);
1752  resizedImage = resizedImageColor;
1753  }
1754  UASSERT(resizedImage.type() == globalTextures.type());
1755  resizedImage.copyTo(globalTextures(cv::Rect(u+indexMaterial*globalTextures.rows, v, resizedImage.cols, resizedImage.rows)));
1756  emptyImageMask.copyTo(globalTextureMasks(cv::Rect(u+indexMaterial*globalTextureMasks.rows, v, resizedImage.cols, resizedImage.rows)));
1757  }
1758  else
1759  {
1760  emptyImage.copyTo(globalTextures(cv::Rect(u+indexMaterial*globalTextures.rows, v, emptyImage.cols, emptyImage.rows)));
1761  }
1762  ++oi;
1763  }
1764 
1765  if(state)
1766  {
1767  if(state->isCanceled())
1768  {
1769  return cv::Mat();
1770  }
1771  state->callback(uFormat("Assembled texture %d/%d.", t+1, (int)textures.size()));
1772  }
1773  }
1774 
1775  UTimer timer;
1776  if(vertexToPixels.size())
1777  {
1778  //UWARN("Saving original.png", globalTexture);
1779  //cv::imwrite("original.png", globalTexture);
1780 
1781  if(gainCompensation)
1782  {
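// Added note (not in the original source): the block below estimates one
// multiplicative gain per kept camera, per channel, similar in spirit to
// OpenCV's detail::GainCompensator. N(i,j) counts the vertices seen by both
// cameras i and j, and I(i,j) accumulates the mean intensity of camera i over
// those shared observations (IR/IG/IB are the per-channel equivalents). The
// gains g are the solution of the linear system A*g = b built further down,
// which, up to constant factors, minimizes
//   sum_ij N(i,j) * ( alpha*(g_i*I(i,j) - g_j*I(j,i))^2 + beta*(1 - g_i)^2 )
// i.e. overlapping observations should agree after gain correction (alpha
// term) while each gain is pulled toward 1 (beta = gainBeta term).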
1787  const int num_images = static_cast<int>(oi);
1788  cv::Mat_<int> N(num_images, num_images); N.setTo(0);
1789  cv::Mat_<double> I(num_images, num_images); I.setTo(0);
1790 
1791  cv::Mat_<double> IR(num_images, num_images); IR.setTo(0);
1792  cv::Mat_<double> IG(num_images, num_images); IG.setTo(0);
1793  cv::Mat_<double> IB(num_images, num_images); IB.setTo(0);
1794 
1795  // Adjust UV coordinates to globalTexture
1796  for(unsigned int p=0; p<vertexToPixels.size(); ++p)
1797  {
1798  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[p].begin(); iter!=vertexToPixels[p].end(); ++iter)
1799  {
1800  if(materialsKept.at(iter->first))
1801  {
1802  N(newCamIndex[iter->first], newCamIndex[iter->first]) +=1;
1803 
1804  std::map<int, pcl::PointXY>::const_iterator jter=iter;
1805  ++jter;
1806  int k = 1;
1807  for(; jter!=vertexToPixels[p].end(); ++jter, ++k)
1808  {
1809  if(materialsKept.at(jter->first))
1810  {
1811  int i = newCamIndex[iter->first];
1812  int j = newCamIndex[jter->first];
1813 
1814  N(i, j) += 1;
1815  N(j, i) += 1;
1816 
1817  int indexMaterial = i / (cols*rows);
1818 
1819  // uv in globalTexture
1820  int ui = iter->second.x*emptyImage.cols + imageOrigin[iter->first].x;
1821  int vi = (1.0-iter->second.y)*emptyImage.rows + imageOrigin[iter->first].y;
1822  int uj = jter->second.x*emptyImage.cols + imageOrigin[jter->first].x;
1823  int vj = (1.0-jter->second.y)*emptyImage.rows + imageOrigin[jter->first].y;
1824  cv::Vec3b * pt1 = globalTextures.ptr<cv::Vec3b>(vi,ui+indexMaterial*globalTextures.rows);
1825  cv::Vec3b * pt2 = globalTextures.ptr<cv::Vec3b>(vj,uj+indexMaterial*globalTextures.rows);
1826 
1827  I(i, j) += std::sqrt(static_cast<double>(sqr(pt1->val[0]) + sqr(pt1->val[1]) + sqr(pt1->val[2])));
1828  I(j, i) += std::sqrt(static_cast<double>(sqr(pt2->val[0]) + sqr(pt2->val[1]) + sqr(pt2->val[2])));
1829 
1830  IR(i, j) += static_cast<double>(pt1->val[2]);
1831  IR(j, i) += static_cast<double>(pt2->val[2]);
1832  IG(i, j) += static_cast<double>(pt1->val[1]);
1833  IG(j, i) += static_cast<double>(pt2->val[1]);
1834  IB(i, j) += static_cast<double>(pt1->val[0]);
1835  IB(j, i) += static_cast<double>(pt2->val[0]);
1836  }
1837  }
1838  }
1839  }
1840  }
1841 
1842  for(int i=0; i<num_images; ++i)
1843  {
1844  for(int j=i; j<num_images; ++j)
1845  {
1846  if(i == j)
1847  {
1848  if(N(i,j) == 0)
1849  {
1850  N(i,j) = 1;
1851  }
1852  }
1853  else if(N(i, j))
1854  {
1855  I(i, j) /= N(i, j);
1856  I(j, i) /= N(j, i);
1857 
1858  IR(i, j) /= N(i, j);
1859  IR(j, i) /= N(j, i);
1860  IG(i, j) /= N(i, j);
1861  IG(j, i) /= N(j, i);
1862  IB(i, j) /= N(i, j);
1863  IB(j, i) /= N(j, i);
1864  }
1865  }
1866  }
1867 
1868  cv::Mat_<double> A(num_images, num_images); A.setTo(0);
1869  cv::Mat_<double> b(num_images, 1); b.setTo(0);
1870  cv::Mat_<double> AR(num_images, num_images); AR.setTo(0);
1871  cv::Mat_<double> AG(num_images, num_images); AG.setTo(0);
1872  cv::Mat_<double> AB(num_images, num_images); AB.setTo(0);
1873  double alpha = 0.01;
1874  double beta = gainBeta;
1875  for (int i = 0; i < num_images; ++i)
1876  {
1877  for (int j = 0; j < num_images; ++j)
1878  {
1879  b(i, 0) += beta * N(i, j);
1880  A(i, i) += beta * N(i, j);
1881  AR(i, i) += beta * N(i, j);
1882  AG(i, i) += beta * N(i, j);
1883  AB(i, i) += beta * N(i, j);
1884  if (j == i) continue;
1885  A(i, i) += 2 * alpha * I(i, j) * I(i, j) * N(i, j);
1886  A(i, j) -= 2 * alpha * I(i, j) * I(j, i) * N(i, j);
1887 
1888  AR(i, i) += 2 * alpha * IR(i, j) * IR(i, j) * N(i, j);
1889  AR(i, j) -= 2 * alpha * IR(i, j) * IR(j, i) * N(i, j);
1890 
1891  AG(i, i) += 2 * alpha * IG(i, j) * IG(i, j) * N(i, j);
1892  AG(i, j) -= 2 * alpha * IG(i, j) * IG(j, i) * N(i, j);
1893 
1894  AB(i, i) += 2 * alpha * IB(i, j) * IB(i, j) * N(i, j);
1895  AB(i, j) -= 2 * alpha * IB(i, j) * IB(j, i) * N(i, j);
1896  }
1897  }
1898 
1899  cv::Mat_<double> gainsGray, gainsR, gainsG, gainsB;
1900  cv::solve(A, b, gainsGray);
1901 
1902  cv::solve(AR, b, gainsR);
1903  cv::solve(AG, b, gainsG);
1904  cv::solve(AB, b, gainsB);
1905 
1906  cv::Mat_<double> gains(gainsGray.rows, 4);
1907  gainsGray.copyTo(gains.col(0));
1908  gainsR.copyTo(gains.col(1));
1909  gainsG.copyTo(gains.col(2));
1910  gainsB.copyTo(gains.col(3));
1911 
1912  for(int t=0; t<(int)textures.size(); ++t)
1913  {
1914  //break;
1915  if(materialsKept.at(t))
1916  {
1917  int u = imageOrigin[t].x;
1918  int v = imageOrigin[t].y;
1919 
1920  UDEBUG("Gain cam%d = %f", newCamIndex[t], gainsGray(newCamIndex[t], 0));
1921 
1922  int indexMaterial = newCamIndex[t] / (cols*rows);
1923  cv::Mat roi = globalTextures(cv::Rect(u+indexMaterial*globalTextures.rows, v, emptyImage.cols, emptyImage.rows));
1924 
1925  std::vector<cv::Mat> channels;
1926  cv::split(roi, channels);
1927 
1928  // assuming BGR
1929  cv::multiply(channels[0], gains(newCamIndex[t], gainRGB?3:0), channels[0]);
1930  cv::multiply(channels[1], gains(newCamIndex[t], gainRGB?2:0), channels[1]);
1931  cv::multiply(channels[2], gains(newCamIndex[t], gainRGB?1:0), channels[2]);
1932 
1933  cv::merge(channels, roi);
1934 
1935  if(gainsOut)
1936  {
1937  cv::Vec4d g(
1938  gains(newCamIndex[t], 0),
1939  gains(newCamIndex[t], 1),
1940  gains(newCamIndex[t], 2),
1941  gains(newCamIndex[t], 3));
1942  if(gainsOut->find(textures[t].first) == gainsOut->end())
1943  {
1944  std::map<int,cv::Vec4d> value;
1945  value.insert(std::make_pair(textures[t].second, g));
1946  gainsOut->insert(std::make_pair(textures[t].first, value));
1947  }
1948  else
1949  {
1950  gainsOut->at(textures[t].first).insert(std::make_pair(textures[t].second, g));
1951  }
1952  }
1953  }
1954  }
1955  //UWARN("Saving gain.png", globalTexture);
1956  //cv::imwrite("gain.png", globalTexture);
1957  if(state) state->callback(uFormat("Gain compensation %fs", timer.ticks()));
1958  }
1959 
1960  if(blending)
1961  {
1962  // blending BGR
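// Added note (not in the original source): this blending pass hides the seams
// left after gain compensation. For every vertex seen by more than one kept
// camera, a target color is computed as a weighted average of the colors
// sampled in the global texture, with a weight that decreases with the
// distance of the UV coordinate from the image center. Per-camera correction
// ratios (clamped to [0.7, 1.3]) are stored on a decimated grid (the
// decimation is derived from the typical polygon edge length in texture space,
// unless blendingDecimation forces a value), then blurred, upscaled back to
// the texture resolution and multiplied into the corresponding tiles.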
1963  int decimation = 1;
1964  if(blendingDecimation <= 0)
1965  {
1966  // determine the decimation to apply
1967  std::vector<float> edgeLengths;
1968  if(mesh.tex_coordinates.size() && mesh.tex_coordinates[0].size())
1969  {
1970  UASSERT(mesh.tex_polygons.size() && mesh.tex_polygons[0].size() && mesh.tex_polygons[0][0].vertices.size());
1971  int polygonSize = mesh.tex_polygons[0][0].vertices.size();
1972  UDEBUG("polygon size=%d", polygonSize);
1973 
1974  for(unsigned int k=0; k<mesh.tex_coordinates.size(); ++k)
1975  {
1976  for(unsigned int i=0; i<mesh.tex_coordinates[k].size(); i+=polygonSize)
1977  {
1978  for(int j=0; j<polygonSize; ++j)
1979  {
1980  const Eigen::Vector2f & uc1 = mesh.tex_coordinates[k][i + j];
1981  const Eigen::Vector2f & uc2 = mesh.tex_coordinates[k][i + (j+1)%polygonSize];
1982  Eigen::Vector2f edge = (uc1-uc2)*textureSize;
1983  edgeLengths.push_back(fabs(edge[0]));
1984  edgeLengths.push_back(fabs(edge[1]));
1985  }
1986  }
1987  }
1988  float edgeLength = 0.0f;
1989  if(edgeLengths.size())
1990  {
1991  std::sort(edgeLengths.begin(), edgeLengths.end());
1992  float m = uMean(edgeLengths.data(), edgeLengths.size());
1993  float stddev = std::sqrt(uVariance(edgeLengths.data(), edgeLengths.size(), m));
1994  edgeLength = m+stddev;
1995  decimation = 1 << 6;
1996  for(int i=1; i<=6; ++i)
1997  {
1998  if(float(1 << i) >= edgeLength)
1999  {
2000  decimation = 1 << i;
2001  break;
2002  }
2003  }
2004  }
2005 
2006  UDEBUG("edge length=%f decimation=%d", edgeLength, decimation);
2007  }
2008  }
2009  else
2010  {
2011  if(blendingDecimation > 1)
2012  {
2013  UASSERT(textureSize % blendingDecimation == 0);
2014  }
2015  decimation = blendingDecimation;
2016  UDEBUG("decimation=%d", decimation);
2017  }
2018 
2019  std::vector<cv::Mat> blendGains(materials);
2020  for(int i=0; i<materials;++i)
2021  {
2022  blendGains[i] = cv::Mat(globalTextures.rows/decimation, globalTextures.rows/decimation, CV_32FC3, cv::Scalar::all(1.0f));
2023  }
2024 
2025  for(unsigned int p=0; p<vertexToPixels.size(); ++p)
2026  {
2027  if(vertexToPixels[p].size() > 1)
2028  {
2029  std::vector<float> gainsB(vertexToPixels[p].size());
2030  std::vector<float> gainsG(vertexToPixels[p].size());
2031  std::vector<float> gainsR(vertexToPixels[p].size());
2032  float sumWeight = 0.0f;
2033  int k=0;
2034  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[p].begin(); iter!=vertexToPixels[p].end(); ++iter)
2035  {
2036  if(materialsKept.at(iter->first))
2037  {
2038  int u = iter->second.x*emptyImage.cols + imageOrigin[iter->first].x;
2039  int v = (1.0-iter->second.y)*emptyImage.rows + imageOrigin[iter->first].y;
2040  float x = iter->second.x - 0.5f;
2041  float y = iter->second.y - 0.5f;
2042  float weight = 0.7f - sqrt(x*x+y*y);
2043  if(weight<0.0f)
2044  {
2045  weight = 0.0f;
2046  }
2047  int indexMaterial = newCamIndex[iter->first] / (cols*rows);
2048  cv::Vec3b * pt = globalTextures.ptr<cv::Vec3b>(v,u+indexMaterial*globalTextures.rows);
2049  gainsB[k] = static_cast<double>(pt->val[0]) * weight;
2050  gainsG[k] = static_cast<double>(pt->val[1]) * weight;
2051  gainsR[k] = static_cast<double>(pt->val[2]) * weight;
2052  sumWeight += weight;
2053  ++k;
2054  }
2055  }
2056  gainsB.resize(k);
2057  gainsG.resize(k);
2058  gainsR.resize(k);
2059 
2060  if(sumWeight > 0)
2061  {
2062  float targetColor[3];
2063  targetColor[0] = uSum(gainsB.data(), gainsB.size()) / sumWeight;
2064  targetColor[1] = uSum(gainsG.data(), gainsG.size()) / sumWeight;
2065  targetColor[2] = uSum(gainsR.data(), gainsR.size()) / sumWeight;
2066  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[p].begin(); iter!=vertexToPixels[p].end(); ++iter)
2067  {
2068  if(materialsKept.at(iter->first))
2069  {
2070  int u = iter->second.x*emptyImage.cols + imageOrigin[iter->first].x;
2071  int v = (1.0-iter->second.y)*emptyImage.rows + imageOrigin[iter->first].y;
2072  int indexMaterial = newCamIndex[iter->first] / (cols*rows);
2073  cv::Vec3b * pt = globalTextures.ptr<cv::Vec3b>(v,u+indexMaterial*globalTextures.rows);
2074  float gB = targetColor[0]/(pt->val[0]==0?1.0f:pt->val[0]);
2075  float gG = targetColor[1]/(pt->val[1]==0?1.0f:pt->val[1]);
2076  float gR = targetColor[2]/(pt->val[2]==0?1.0f:pt->val[2]);
2077  cv::Vec3f * ptr = blendGains[indexMaterial].ptr<cv::Vec3f>(v/decimation, u/decimation);
2078  ptr->val[0] = (gB>1.3f)?1.3f:(gB<0.7f)?0.7f:gB;
2079  ptr->val[1] = (gG>1.3f)?1.3f:(gG<0.7f)?0.7f:gG;
2080  ptr->val[2] = (gR>1.3f)?1.3f:(gR<0.7f)?0.7f:gR;
2081  }
2082  }
2083  }
2084  }
2085  }
2086 
2087  if(blendingGainsOut)
2088  {
2089  for(int t=0; t<(int)textures.size(); ++t)
2090  {
2091  //break;
2092  if(materialsKept.at(t))
2093  {
2094  int u = imageOrigin[t].x/decimation;
2095  int v = imageOrigin[t].y/decimation;
2096 
2097  int indexMaterial = newCamIndex[t] / (cols*rows);
2098  cv::Mat roi = blendGains[indexMaterial](cv::Rect(u, v, emptyImage.cols/decimation, emptyImage.rows/decimation));
2099  if(blendingGainsOut->find(textures[t].first) == blendingGainsOut->end())
2100  {
2101  std::map<int,cv::Mat> value;
2102  value.insert(std::make_pair(textures[t].second, roi.clone()));
2103  blendingGainsOut->insert(std::make_pair(textures[t].first, value));
2104  }
2105  else
2106  {
2107  blendingGainsOut->at(textures[t].first).insert(std::make_pair(textures[t].second, roi.clone()));
2108  }
2109  }
2110  }
2111  }
2112 
2113  for(int i=0; i<materials; ++i)
2114  {
2115  /*std::vector<cv::Mat> channels;
2116  cv::split(blendGains, channels);
2117  cv::Mat img;
2118  channels[0].convertTo(img,CV_8U,128.0,0);
2119  cv::imwrite("blendSmallB.png", img);
2120  channels[1].convertTo(img,CV_8U,128.0,0);
2121  cv::imwrite("blendSmallG.png", img);
2122  channels[2].convertTo(img,CV_8U,128.0,0);
2123  cv::imwrite("blendSmallR.png", img);*/
2124 
2125  cv::Mat globalTexturesROI = globalTextures(cv::Range::all(), cv::Range(i*globalTextures.rows, (i+1)*globalTextures.rows));
2126  cv::Mat dst;
2127  cv::blur(blendGains[i], dst, cv::Size(3,3));
2128  cv::resize(dst, blendGains[i], globalTexturesROI.size(), 0, 0, cv::INTER_LINEAR);
2129 
2130  /*cv::split(blendGains, channels);
2131  channels[0].convertTo(img,CV_8U,128.0,0);
2132  cv::imwrite("blendFullB.png", img);
2133  channels[1].convertTo(img,CV_8U,128.0,0);
2134  cv::imwrite("blendFullG.png", img);
2135  channels[2].convertTo(img,CV_8U,128.0,0);
2136  cv::imwrite("blendFullR.png", img);*/
2137 
2138  cv::multiply(globalTexturesROI, blendGains[i], globalTexturesROI, 1.0, CV_8UC3);
2139 
2140  //UWARN("Saving blending.png", globalTexture);
2141  //cv::imwrite("blending.png", globalTexture);
2142  }
2143 
2144  if(state) state->callback(uFormat("Blending (decimation=%d) %fs", decimation, timer.ticks()));
2145  }
2146  }
2147 
2148  if(brightnessContrastRatioLow > 0 || brightnessContrastRatioHigh > 0)
2149  {
2150  if(exposureFusion)
2151  {
2152  std::vector<cv::Mat> images;
2153  images.push_back(globalTextures);
2154  if (brightnessContrastRatioLow > 0)
2155  {
2156  images.push_back(util2d::brightnessAndContrastAuto(
2157  globalTextures,
2158  globalTextureMasks,
2159  (float)brightnessContrastRatioLow,
2160  0.0f));
2161  }
2162  if (brightnessContrastRatioHigh > 0)
2163  {
2164  images.push_back(util2d::brightnessAndContrastAuto(
2165  globalTextures,
2166  globalTextureMasks,
2167  0.0f,
2168  (float)brightnessContrastRatioHigh));
2169  }
2170 
2171  globalTextures = util2d::exposureFusion(images);
2172  }
2173  else
2174  {
2175  float alpha, beta;
2176  globalTextures = util2d::brightnessAndContrastAuto(
2177  globalTextures,
2178  globalTextureMasks,
2179  (float)brightnessContrastRatioLow,
2180  (float)brightnessContrastRatioHigh,
2181  &alpha,
2182  &beta);
2183  if(contrastValuesOut)
2184  {
2185  contrastValuesOut->first = alpha;
2186  contrastValuesOut->second = beta;
2187  }
2188  }
2189  if(state) state->callback(uFormat("Brightness and contrast auto %fs", timer.ticks()));
2190  }
2191  }
2192 
2193  // Cloud color
2194  if(clearVertexColorUnderTexture)
2195  {
2196  int colorOffset = 0;
2197  for(unsigned int i=0; i<mesh.cloud.fields.size(); ++i)
2198  {
2199  if(mesh.cloud.fields[i].name.compare("rgb") == 0)
2200  {
2201  colorOffset = mesh.cloud.fields[i].offset;
2202  break;
2203  }
2204  }
2205  if(colorOffset>0)
2206  {
2207  pcl::IndicesPtr notTexturedVertexIndices(new std::vector<int>);
2208  UASSERT(mesh.tex_coordinates.size() == mesh.tex_polygons.size());
2209  for(size_t t=0; t<mesh.tex_polygons.size(); ++t)
2210  {
2211  int pixelIndex = 0;
2212  for(size_t p=0; p<mesh.tex_polygons[t].size(); ++p)
2213  {
2214  // only clear polygon color if all 3 vertices have valid tex coordinates.
2215  bool valid = true;
2216  for(size_t v=0; v<mesh.tex_polygons[t][p].vertices.size() && valid; ++v)
2217  {
2218  UASSERT(pixelIndex+v < mesh.tex_coordinates[t].size());
2219  const Eigen::Vector2f & uv = mesh.tex_coordinates[t][pixelIndex+v];
2220  if(uv[0] == -1 || uv[1] == -1)
2221  {
2222  valid = false;
2223  }
2224  }
2225  if(!valid)
2226  {
2227  for(size_t v=0; v<mesh.tex_polygons[t][p].vertices.size(); ++v)
2228  {
2229  int vertex = mesh.tex_polygons[t][p].vertices[v];
2230  notTexturedVertexIndices->push_back(vertex);
2231  }
2232  }
2233  pixelIndex+=mesh.tex_polygons[t][p].vertices.size();
2234  }
2235  }
2236  // Set up the full indices set
2237  pcl::IndicesPtr full_indices(new std::vector<int>(mesh.cloud.width* mesh.cloud.height));
2238  for (size_t fii = 0; fii < full_indices->size(); ++fii) // fii = full indices iterator
2239  full_indices->at(fii) = fii;
2240 
2241  // Set up the sorted input indices
2242  std::sort (notTexturedVertexIndices->begin (), notTexturedVertexIndices->end ());
2243 
2244  // Store the difference in indices
2245  pcl::IndicesPtr texturedVertexIndices(new std::vector<int>());
2246  std::set_difference (full_indices->begin (), full_indices->end (), notTexturedVertexIndices->begin (), notTexturedVertexIndices->end (), std::inserter (*texturedVertexIndices, texturedVertexIndices->begin ()));
2247 
2248  for(size_t i=0; i<texturedVertexIndices->size(); ++i)
2249  {
2250  std::uint32_t white = 0xffffff;
2251  UASSERT(texturedVertexIndices->at(i) * mesh.cloud.point_step + colorOffset < mesh.cloud.data.size());
2252  memcpy(&mesh.cloud.data.data()[texturedVertexIndices->at(i) * mesh.cloud.point_step + colorOffset], reinterpret_cast<float*>(&white), sizeof(float));
2253  }
2254  }
2255  }
2256  }
2257  }
2258  UDEBUG("globalTextures=%d", globalTextures.cols?globalTextures.cols / globalTextures.rows:0);
2259  return globalTextures;
2260 }
2261 
2262 void fixTextureMeshForVisualization(pcl::TextureMesh & textureMesh)
2263 {
2264  // VTK issue:
2265  // tex_coordinates should be linked to points, not
2266  // polygon vertices. Points linked to multiple different TCoords (different textures) should
2267  // be duplicated.
2268  for (unsigned int t = 0; t < textureMesh.tex_coordinates.size(); ++t)
2269  {
2270  if(textureMesh.tex_polygons[t].size())
2271  {
2272  pcl::PointCloud<pcl::PointXYZ>::Ptr originalCloud(new pcl::PointCloud<pcl::PointXYZ>);
2273  pcl::fromPCLPointCloud2(textureMesh.cloud, *originalCloud);
2274 
2275  // make a cloud with as many points as there are polygon vertices
2276  unsigned int nPoints = textureMesh.tex_coordinates[t].size();
2277  UASSERT(nPoints == textureMesh.tex_polygons[t].size()*textureMesh.tex_polygons[t][0].vertices.size()); // assuming polygon size is constant!
2278 
2279  pcl::PointCloud<pcl::PointXYZ>::Ptr newCloud(new pcl::PointCloud<pcl::PointXYZ>);
2280  newCloud->resize(nPoints);
2281 
2282  unsigned int oi = 0;
2283  for (unsigned int i = 0; i < textureMesh.tex_polygons[t].size(); ++i)
2284  {
2285  pcl::Vertices & vertices = textureMesh.tex_polygons[t][i];
2286 
2287  for(unsigned int j=0; j<vertices.vertices.size(); ++j)
2288  {
2289  UASSERT(oi < newCloud->size());
2290  UASSERT_MSG((size_t)vertices.vertices[j] < originalCloud->size(), uFormat("%d vs %d", vertices.vertices[j], (int)originalCloud->size()).c_str());
2291  newCloud->at(oi) = originalCloud->at(vertices.vertices[j]);
2292  vertices.vertices[j] = oi; // new vertex index
2293  ++oi;
2294  }
2295  }
2296  pcl::toPCLPointCloud2(*newCloud, textureMesh.cloud);
2297  }
2298  }
2299 }
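// Usage sketch (added for documentation; the mesh variable is hypothetical and
// is assumed to come from util3d::createTextureMesh()):
//   pcl::TextureMesh::Ptr textureMesh = ...;
//   rtabmap::util3d::fixTextureMeshForVisualization(*textureMesh);
//   // The mesh can now be given to a VTK-based viewer such as
//   // pcl::visualization::PCLVisualizer::addTextureMesh() without the
//   // shared-vertex / per-polygon UV mismatch described above.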
2300 
2301 bool multiBandTexturing(
2302  const std::string & outputOBJPath,
2303  const pcl::PCLPointCloud2 & cloud,
2304  const std::vector<pcl::Vertices> & polygons,
2305  const std::map<int, Transform> & cameraPoses,
2306  const std::vector<std::map<int, pcl::PointXY> > & vertexToPixels, // required output of util3d::createTextureMesh()
2307  const std::map<int, cv::Mat> & images, // raw or compressed, can be empty if memory or dbDriver should be used
2308  const std::map<int, std::vector<CameraModel> > & cameraModels, // Should match images
2309  const Memory * memory, // Should be set if images are not set
2310  const DBDriver * dbDriver, // Should be set if images and memory are not set
2311  int textureSize,
2312  const std::string & textureFormat,
2313  const std::map<int, std::map<int, cv::Vec4d> > & gains, // optional output of util3d::mergeTextures()
2314  const std::map<int, std::map<int, cv::Mat> > & blendingGains, // optional output of util3d::mergeTextures()
2315  const std::pair<float, float> & contrastValues, // optional output of util3d::mergeTextures()
2316  bool gainRGB)
2317 {
2318  return multiBandTexturing(
2319  outputOBJPath,
2320  cloud,
2321  polygons,
2322  cameraPoses,
2323  vertexToPixels,
2324  images,
2325  cameraModels,
2326  memory,
2327  dbDriver,
2328  textureSize,
2329  2,
2330  "1 5 10 0",
2331  textureFormat,
2332  gains,
2333  blendingGains,
2334  contrastValues,
2335  gainRGB);
2336 }
2337 
2338 bool multiBandTexturing(
2339  const std::string & outputOBJPath,
2340  const pcl::PCLPointCloud2 & cloud,
2341  const std::vector<pcl::Vertices> & polygons,
2342  const std::map<int, Transform> & cameraPoses,
2343  const std::vector<std::map<int, pcl::PointXY> > & vertexToPixels,
2344  const std::map<int, cv::Mat> & images,
2345  const std::map<int, std::vector<CameraModel> > & cameraModels,
2346  const Memory * memory,
2347  const DBDriver * dbDriver,
2348  unsigned int textureSize,
2349  unsigned int textureDownScale,
2350  const std::string & nbContrib,
2351  const std::string & textureFormat,
2352  const std::map<int, std::map<int, cv::Vec4d> > & gains,
2353  const std::map<int, std::map<int, cv::Mat> > & blendingGains,
2354  const std::pair<float, float> & contrastValues,
2355  bool gainRGB,
2356  unsigned int unwrapMethod,
2357  bool fillHoles,
2358  unsigned int padding,
2359  double bestScoreThreshold,
2360  double angleHardThreshold,
2361  bool forceVisibleByAllVertices)
2362 {
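// Added overview (not in the original source) of the AliceVision-based
// implementation below: the input cloud and polygons are converted to an
// aliceVision::mesh::Mesh with per-point visibilities taken from
// vertexToPixels; each camera image is written to a temporary folder (after
// applying the optional gain/blending corrections) together with its pose and
// pinhole intrinsics in an SfMData structure; the mesh is then UV-unwrapped
// and textured with AliceVision's multi-band blending, and the resulting PNG
// textures are renamed/converted to the requested format while the .mtl file
// is patched accordingly.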
2363 
2364 #ifdef RTABMAP_ALICE_VISION
2365  if(ULogger::level() == ULogger::kDebug)
2366  {
2367  system::Logger::get()->setLogLevel(system::EVerboseLevel::Trace);
2368  }
2369  else if(ULogger::level() == ULogger::kInfo)
2370  {
2371  system::Logger::get()->setLogLevel(system::EVerboseLevel::Info);
2372  }
2373  else if(ULogger::level() == ULogger::kWarning)
2374  {
2375  system::Logger::get()->setLogLevel(system::EVerboseLevel::Warning);
2376  }
2377  else
2378  {
2379  system::Logger::get()->setLogLevel(system::EVerboseLevel::Error);
2380  }
2381 
2382  sfmData::SfMData sfmData;
2383  pcl::PointCloud<pcl::PointXYZRGB> cloud2;
2384  pcl::fromPCLPointCloud2(cloud, cloud2);
2385  UASSERT(vertexToPixels.size() == cloud2.size());
2386  UINFO("Input mesh: %d points %d polygons", (int)cloud2.size(), (int)polygons.size());
2387  mesh::Texturing texturing;
2388 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2389  texturing.mesh = new mesh::Mesh();
2390  texturing.mesh->pts.resize(cloud2.size());
2391  texturing.mesh->pointsVisibilities.resize(cloud2.size());
2392 #else
2393  texturing.me = new mesh::Mesh();
2394  texturing.me->pts = new StaticVector<Point3d>(cloud2.size());
2395  texturing.pointsVisibilities = new mesh::PointsVisibility();
2396  texturing.pointsVisibilities->reserve(cloud2.size());
2397 #endif
2398  texturing.texParams.textureSide = textureSize;
2399  texturing.texParams.downscale = textureDownScale;
2400  std::vector<int> multiBandNbContrib;
2401  std::list<std::string> values = uSplit(nbContrib, ' ');
2402  for(std::list<std::string>::iterator iter=values.begin(); iter!=values.end(); ++iter)
2403  {
2404  multiBandNbContrib.push_back(uStr2Int(*iter));
2405  }
2406  if(multiBandNbContrib.size() != 4)
2407  {
2408  UERROR("multiband: Wrong number of contribution values (value=\"%s\", should be 4), using default values instead.", nbContrib.c_str());
2409  }
2410  else
2411  {
2412  texturing.texParams.multiBandNbContrib = multiBandNbContrib;
2413  }
2414  texturing.texParams.padding = padding;
2415  texturing.texParams.fillHoles = fillHoles;
2416  texturing.texParams.bestScoreThreshold = bestScoreThreshold;
2417  texturing.texParams.angleHardThreshold = angleHardThreshold;
2418  texturing.texParams.forceVisibleByAllVertices = forceVisibleByAllVertices;
2419  texturing.texParams.visibilityRemappingMethod = mesh::EVisibilityRemappingMethod::Pull;
2420 
2421 
2422  for(size_t i=0;i<cloud2.size();++i)
2423  {
2424  pcl::PointXYZRGB pt = cloud2.at(i);
2425 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2426  texturing.mesh->pointsVisibilities[i].reserve(vertexToPixels[i].size());
2427  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[i].begin(); iter!=vertexToPixels[i].end();++iter)
2428  {
2429  texturing.mesh->pointsVisibilities[i].push_back(iter->first);
2430  }
2431  texturing.mesh->pts[i] = Point3d(pt.x, pt.y, pt.z);
2432 #else
2433  mesh::PointVisibility* pointVisibility = new mesh::PointVisibility();
2434  pointVisibility->reserve(vertexToPixels[i].size());
2435  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[i].begin(); iter!=vertexToPixels[i].end();++iter)
2436  {
2437  pointVisibility->push_back(iter->first);
2438  }
2439  texturing.pointsVisibilities->push_back(pointVisibility);
2440  (*texturing.me->pts)[i] = Point3d(pt.x, pt.y, pt.z);
2441 #endif
2442  }
2443 
2444 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2445  texturing.mesh->tris.resize(polygons.size());
2446  texturing.mesh->trisMtlIds().resize(polygons.size());
2447 #else
2448  texturing.me->tris = new StaticVector<mesh::Mesh::triangle>(polygons.size());
2449 #endif
2450  for(size_t i=0;i<polygons.size();++i)
2451  {
2452  UASSERT(polygons[i].vertices.size() == 3);
2453 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2454  texturing.mesh->trisMtlIds()[i] = -1;
2455  texturing.mesh->tris[i] = mesh::Mesh::triangle(
2456 #else
2457  (*texturing.me->tris)[i] = mesh::Mesh::triangle(
2458 #endif
2459  polygons[i].vertices[0],
2460  polygons[i].vertices[1],
2461  polygons[i].vertices[2]);
2462  }
2463  UTimer timer;
2464  std::string outputDirectory = UDirectory::getDir(outputOBJPath);
2465  std::string tmpImageDirectory = outputDirectory+"/rtabmap_tmp_textures";
2466  UDirectory::removeDir(tmpImageDirectory);
2467  UDirectory::makeDir(tmpImageDirectory);
2468  UINFO("Temporary saving images from %ld nodes in directory \"%s\"...", cameraPoses.size(), tmpImageDirectory.c_str());
2469  int viewId = 0;
2470  for(std::map<int, Transform>::const_iterator iter = cameraPoses.lower_bound(1); iter!=cameraPoses.end(); ++iter)
2471  {
2472  int camId = iter->first;
2473  cv::Mat image;
2474  std::vector<CameraModel> models;
2475 
2476  if( images.find(camId) != images.end() &&
2477  !images.find(camId)->second.empty() &&
2478  cameraModels.find(camId) != cameraModels.end())
2479  {
2480  image = images.find(camId)->second;
2481  models = cameraModels.find(camId)->second;
2482  }
2483  else if(memory)
2484  {
2485  SensorData data = memory->getNodeData(camId, true, false, false, false);
2486  models = data.cameraModels();
2487  if(models.empty() && data.stereoCameraModels().size())
2488  {
2489  for(size_t i=0; i<data.stereoCameraModels().size(); ++i)
2490  {
2491  models.push_back(data.stereoCameraModels()[i].left());
2492  }
2493  }
2494  if(data.imageRaw().empty())
2495  {
2496  image = data.imageCompressed();
2497  }
2498  else
2499  {
2500  image = data.imageRaw();
2501  }
2502 
2503  if(models.empty() || image.empty())
2504  {
2505  Transform odomPose;
2506  int mapId;
2507  int weight=0;
2508  std::string label;
2509  double stamp;
2510  Transform gt;
2511  std::vector<float> vel;
2512  GPS gps;
2513  EnvSensors envs;
2514  memory->getNodeInfo(camId, odomPose, mapId, weight, label, stamp, gt, vel, gps, envs, true);
2515  if(weight == -1) // just ignore intermediate nodes if their data is not set
2516  continue;
2517  }
2518  }
2519  else if(dbDriver)
2520  {
2521  std::vector<StereoCameraModel> stereoModels;
2522  dbDriver->getCalibration(camId, models, stereoModels);
2523  if(models.empty() && stereoModels.size())
2524  {
2525  for(size_t i=0; i<stereoModels.size(); ++i)
2526  {
2527  models.push_back(stereoModels[i].left());
2528  }
2529  }
2530 
2531  SensorData data;
2532  dbDriver->getNodeData(camId, data, true, false, false, false);
2533  if(data.imageRaw().empty())
2534  {
2535  image = data.imageCompressed();
2536  }
2537  else
2538  {
2539  image = data.imageRaw();
2540  }
2541 
2542  if(models.empty() || image.empty())
2543  {
2544  int weight=0;
2545  dbDriver->getWeight(camId, weight);
2546  if(weight == -1) // just ignore intermediate nodes if their data is not set
2547  continue;
2548  }
2549  }
2550  if(models.empty())
2551  {
2552  UERROR("No camera models found for camera %d. Aborting multiband texturing...", iter->first);
2553  return false;
2554  }
2555  if(image.empty())
2556  {
2557  UERROR("No image found for camera %d. Aborting multiband texturing...", iter->first);
2558  return false;
2559  }
2560 
2561  if(image.rows == 1 && image.type() == CV_8UC1)
2562  {
2563  image = uncompressImage(image);
2564  }
2565  else
2566  {
2567  image = image.clone();
2568  }
2569 
2570  for(size_t i=0; i<models.size(); ++i)
2571  {
2572  const CameraModel & model = models.at(i);
2573  cv::Size imageSize = model.imageSize();
2574  if(imageSize.height == 0)
2575  {
2576  // backward compatibility
2577  imageSize.height = image.rows;
2578  imageSize.width = image.cols;
2579  }
2580 
2581  UASSERT(image.cols % imageSize.width == 0);
2582  cv::Mat imageRoi = image.colRange(i*imageSize.width, (i+1)*imageSize.width);
2583 
2584  if(gains.find(camId) != gains.end() &&
2585  gains.at(camId).find(i) != gains.at(camId).end())
2586  {
2587  const cv::Vec4d & g = gains.at(camId).at(i);
2588  if(imageRoi.channels() == 1)
2589  {
2590  cv::multiply(imageRoi, g.val[0], imageRoi);
2591  }
2592  else
2593  {
2594  std::vector<cv::Mat> channels;
2595  cv::split(imageRoi, channels);
2596 
2597  // assuming BGR
2598  cv::multiply(channels[0], g.val[gainRGB?3:0], channels[0]);
2599  cv::multiply(channels[1], g.val[gainRGB?2:0], channels[1]);
2600  cv::multiply(channels[2], g.val[gainRGB?1:0], channels[2]);
2601 
2602  cv::Mat output;
2603  cv::merge(channels, output);
2604  imageRoi = output;
2605  }
2606  }
2607 
2608  if(blendingGains.find(camId) != blendingGains.end() &&
2609  blendingGains.at(camId).find(i) != blendingGains.at(camId).end())
2610  {
2611  // The image should be color for the blending option
2612  if(imageRoi.channels() == 1)
2613  {
2614  cv::Mat imageRoiColor;
2615  cv::cvtColor(imageRoi, imageRoiColor, CV_GRAY2BGR);
2616  imageRoi = imageRoiColor;
2617  }
2618 
2619  cv::Mat g = blendingGains.at(camId).at(i);
2620  cv::Mat dst;
2621  cv::blur(g, dst, cv::Size(3,3));
2622  cv::Mat gResized;
2623  cv::resize(dst, gResized, imageRoi.size(), 0, 0, cv::INTER_LINEAR);
2624  cv::Mat output;
2625  cv::multiply(imageRoi, gResized, output, 1.0, CV_8UC3);
2626  imageRoi = output;
2627  }
2628 
2629  Transform t = (iter->second * model.localTransform()).inverse();
2630  Eigen::Matrix<double, 3, 4> m = t.toEigen3d().matrix().block<3,4>(0, 0);
2631  sfmData::CameraPose pose(geometry::Pose3(m), true);
2632  sfmData.setAbsolutePose((IndexT)viewId, pose);
2633 
2634  UDEBUG("%d %d %f %f %f %f", imageSize.width, imageSize.height, model.fx(), model.fy(), model.cx(), model.cy());
2635  std::shared_ptr<camera::IntrinsicBase> camPtr = std::make_shared<camera::Pinhole>(
2636 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=4)
2637  //https://github.com/alicevision/AliceVision/commit/9fab5c79a1c65595fe5c5001267e1c5212bc93f0#diff-b0c0a3c30de50be8e4ed283dfe4c8ae4a9bc861aa9a83bd8bfda8182e9d67c08
2638  // [all] the camera principal point is now defined as an offset relative to the image center
2639  imageSize.width, imageSize.height, model.fx(), model.fy(), model.cx() - double(imageSize.width) * 0.5, model.cy() - double(imageSize.height) * 0.5);
2640 #else
2641  imageSize.width, imageSize.height, model.fx(), model.cx(), model.cy());
2642 #endif
2643  sfmData.intrinsics.insert(std::make_pair((IndexT)viewId, camPtr));
2644 
2645  std::string imagePath = tmpImageDirectory+uFormat("/%d.jpg", viewId);
2646  cv::imwrite(imagePath, imageRoi);
2647  std::shared_ptr<sfmData::View> viewPtr = std::make_shared<sfmData::View>(
2648  imagePath,
2649  (IndexT)viewId,
2650  (IndexT)viewId,
2651  (IndexT)viewId,
2652  imageSize.width,
2653  imageSize.height);
2654  sfmData.views.insert(std::make_pair((IndexT)viewId, viewPtr));
2655  ++viewId;
2656  }
2657  UDEBUG("camId=%d", camId);
2658  }
2659  UINFO("Temporary saving images in directory \"%s\"... done (%d images of %d nodes). %fs", tmpImageDirectory.c_str(), viewId, (int)cameraPoses.size(), timer.ticks());
2660 
2661  mvsUtils::MultiViewParams mp(sfmData);
2662 
2663  UINFO("Unwrapping (method=%d=%s)...", unwrapMethod, mesh::EUnwrapMethod_enumToString((mesh::EUnwrapMethod)unwrapMethod).c_str());
2664  texturing.unwrap(mp, (mesh::EUnwrapMethod)unwrapMethod);
2665  UINFO("Unwrapping done. %fs", timer.ticks());
2666 
2667  // save final obj file
2668  std::string baseName = uSplit(UFile::getName(outputOBJPath), '.').front();
2669 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=4)
2670  texturing.saveAs(outputDirectory, baseName, aliceVision::mesh::EFileType::OBJ, imageIO::EImageFileType::PNG);
2671 #else
2672  texturing.saveAsOBJ(outputDirectory, baseName);
2673 #endif
2674  UINFO("Saved %s. %fs", outputOBJPath.c_str(), timer.ticks());
2675 
2676  // generate textures
2677  UINFO("Generating textures...");
2678  texturing.generateTextures(mp, outputDirectory);
2679  UINFO("Generating textures done. %fs", timer.ticks());
2680 
2681  UINFO("Cleanup temporary directory \"%s\"...", tmpImageDirectory.c_str());
2682  UDirectory dir(tmpImageDirectory);
2683  std::string fp = dir.getNextFilePath();
2684  while(!fp.empty())
2685  {
2686  UFile::erase(fp);
2687  fp = dir.getNextFilePath();
2688  }
2689  UDirectory::removeDir(tmpImageDirectory);
2690  UINFO("Cleanup temporary directory \"%s\"... done.", tmpImageDirectory.c_str());
2691 
2692  UINFO("Rename/convert textures...");
2693  dir.setPath(outputDirectory, "png");
2694  std::map<std::string, std::string> texNames; // <old, new>
2695  std::string outputFormat = textureFormat;
2696  if(outputFormat.front() == '.')
2697  {
2698  outputFormat = outputFormat.substr(1, std::string::npos);
2699  }
2700  for(std::list<std::string>::const_iterator iter=dir.getFileNames().begin(); iter!=dir.getFileNames().end(); ++iter)
2701  {
2702  // Textures are called "texture_1001.png", "texture_1002.png", ...
2703  if(uStrContains(*iter, "texture_10"))
2704  {
2705  cv::Mat img = cv::imread(outputDirectory+"/"+*iter);
2706  if(contrastValues.first != 0.0f || contrastValues.second != 0.0f)
2707  {
2708  UASSERT(img.channels() == 3);
2709  // Re-use same contrast values with all images
2710  UINFO("Apply contrast values %f %f", contrastValues.first, contrastValues.second);
2711  img.convertTo(img, -1, contrastValues.first, contrastValues.second);
2712  }
2713  std::string newName = *iter;
2714  boost::replace_all(newName, "png", outputFormat);
2715  boost::replace_all(newName, "texture", baseName);
2716  texNames.insert(std::make_pair(*iter, newName));
2717  cv::imwrite(outputDirectory+"/"+newName, img);
2718  UFile::erase(outputDirectory+"/"+*iter);
2719  }
2720  }
2721  std::ifstream fi(outputDirectory+"/"+baseName+".mtl");
2722  std::string mtlStr((std::istreambuf_iterator<char>(fi)),
2723  std::istreambuf_iterator<char>());
2724  fi.close();
2725  UFile::erase(outputDirectory+"/"+baseName);
2726  for(std::map<std::string, std::string>::iterator iter=texNames.begin(); iter!=texNames.end(); ++iter)
2727  {
2728  boost::replace_all(mtlStr, iter->first, iter->second);
2729  }
2730  std::ofstream fo(outputDirectory+"/"+baseName+".mtl");
2731  fo.write(mtlStr.c_str(), mtlStr.size());
2732  fo.close();
2733  UINFO("Rename/convert textures... done. %fs", timer.ticks());
2734 
2735 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2736  UINFO("Cleanup sfmdata...");
2737  sfmData.clear();
2738  UINFO("Cleanup sfmdata... done. %fs", timer.ticks());
2739 #endif
2740 
2741  return true;
2742 #else
2743  UERROR("Cannot unwrap texture mesh. RTAB-Map is not built with Alice Vision support! Returning false.");
2744  return false;
2745 #endif
2746 }
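// Usage sketch (added for documentation; all variable names and numeric values
// are hypothetical, and the inputs are assumed to come from
// util3d::createTextureMesh() / util3d::mergeTextures() as noted in the
// parameter comments of the first overload above):
//   bool ok = rtabmap::util3d::multiBandTexturing(
//       "/tmp/mesh.obj",            // textures are written next to the OBJ
//       cloud, polygons,            // pcl::PCLPointCloud2 + triangle polygons
//       cameraPoses, vertexToPixels,
//       images, cameraModels,       // raw or compressed images and their models
//       0, 0,                       // Memory* / DBDriver* not used here
//       8192, ".jpg",               // texture size and output format
//       gains, blendingGains, contrastValues,
//       true);                      // apply per-channel (RGB) gains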
2747 
2748 LaserScan computeNormals(
2749  const LaserScan & laserScan,
2750  int searchK,
2751  float searchRadius)
2752 {
2753  if(laserScan.isEmpty())
2754  {
2755  return laserScan;
2756  }
2757 
2758  pcl::PointCloud<pcl::Normal>::Ptr normals;
2759  // convert to compatible PCL format and filter it
2760  if(laserScan.hasRGB())
2761  {
2762  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = laserScanToPointCloudRGB(laserScan);
2763  if(cloud->size())
2764  {
2765  UASSERT(!laserScan.is2d());
2766  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals(cloud, searchK, searchRadius);
2767  return LaserScan(laserScanFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), laserScan.localTransform());
2768  }
2769  }
2770  else if(laserScan.hasIntensity())
2771  {
2772  pcl::PointCloud<pcl::PointXYZI>::Ptr cloud = laserScanToPointCloudI(laserScan);
2773  if(cloud->size())
2774  {
2775  if(laserScan.is2d())
2776  {
2777  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals2D(cloud, searchK, searchRadius);
2778  if(laserScan.angleIncrement() > 0.0f)
2779  {
2780  return LaserScan(laserScan2dFromPointCloud(*cloud, *normals), laserScan.rangeMin(), laserScan.rangeMax(), laserScan.angleMin(), laserScan.angleMax(), laserScan.angleIncrement(), laserScan.localTransform());
2781  }
2782  else
2783  {
2784  return LaserScan(laserScan2dFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), laserScan.localTransform());
2785  }
2786 
2787  }
2788  else
2789  {
2790  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals(cloud, searchK, searchRadius);
2791  return LaserScan(laserScanFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), laserScan.localTransform());
2792  }
2793  }
2794  }
2795  else
2796  {
2797  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = laserScanToPointCloud(laserScan);
2798  if(cloud->size())
2799  {
2800  if(laserScan.is2d())
2801  {
2802  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals2D(cloud, searchK, searchRadius);
2803  if(laserScan.angleIncrement() > 0.0f)
2804  {
2805  return LaserScan(laserScan2dFromPointCloud(*cloud, *normals), laserScan.rangeMin(), laserScan.rangeMax(), laserScan.angleMin(), laserScan.angleMax(), laserScan.angleIncrement(), laserScan.localTransform());
2806  }
2807  else
2808  {
2809  return LaserScan(laserScan2dFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), laserScan.localTransform());
2810  }
2811  }
2812  else
2813  {
2814  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals(cloud, searchK, searchRadius);
2815  return LaserScan(laserScanFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), laserScan.localTransform());
2816  }
2817  }
2818  }
2819  return LaserScan();
2820 }
2821 
2822 template<typename PointT>
2823 pcl::PointCloud<pcl::Normal>::Ptr computeNormalsImpl(
2824  const typename pcl::PointCloud<PointT>::Ptr & cloud,
2825  const pcl::IndicesPtr & indices,
2826  int searchK,
2827  float searchRadius,
2828  const Eigen::Vector3f & viewPoint)
2829 {
2830  typename pcl::search::KdTree<PointT>::Ptr tree (new pcl::search::KdTree<PointT>);
2831  if(indices->size())
2832  {
2833  tree->setInputCloud(cloud, indices);
2834  }
2835  else
2836  {
2837  tree->setInputCloud (cloud);
2838  }
2839 
2840  // Normal estimation*
2841 #ifdef PCL_OMP
2842  pcl::NormalEstimationOMP<PointT, pcl::Normal> n;
2843 #else
2844  pcl::NormalEstimation<PointT, pcl::Normal> n;
2845 #endif
2846  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
2847  n.setInputCloud (cloud);
2848  // Commented: Keep the output normals size the same as the input cloud
2849  //if(indices->size())
2850  //{
2851  // n.setIndices(indices);
2852  //}
2853  n.setSearchMethod (tree);
2854  n.setKSearch (searchK);
2855  n.setRadiusSearch (searchRadius);
2856  n.setViewPoint(viewPoint[0], viewPoint[1], viewPoint[2]);
2857  n.compute (*normals);
2858 
2859  return normals;
2860 }
2861 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2862  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
2863  int searchK,
2864  float searchRadius,
2865  const Eigen::Vector3f & viewPoint)
2866 {
2867  pcl::IndicesPtr indices(new std::vector<int>);
2868  return computeNormals(cloud, indices, searchK, searchRadius, viewPoint);
2869 }
2870 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2871  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
2872  int searchK,
2873  float searchRadius,
2874  const Eigen::Vector3f & viewPoint)
2875 {
2876  pcl::IndicesPtr indices(new std::vector<int>);
2877  return computeNormals(cloud, indices, searchK, searchRadius, viewPoint);
2878 }
2879 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2880  const pcl::PointCloud<pcl::PointXYZI>::Ptr & cloud,
2881  int searchK,
2882  float searchRadius,
2883  const Eigen::Vector3f & viewPoint)
2884 {
2885  pcl::IndicesPtr indices(new std::vector<int>);
2886  return computeNormals(cloud, indices, searchK, searchRadius, viewPoint);
2887 }
2888 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2889  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
2890  const pcl::IndicesPtr & indices,
2891  int searchK,
2892  float searchRadius,
2893  const Eigen::Vector3f & viewPoint)
2894 {
2895  return computeNormalsImpl<pcl::PointXYZ>(cloud, indices, searchK, searchRadius, viewPoint);
2896 }
2897 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2898  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
2899  const pcl::IndicesPtr & indices,
2900  int searchK,
2901  float searchRadius,
2902  const Eigen::Vector3f & viewPoint)
2903 {
2904  return computeNormalsImpl<pcl::PointXYZRGB>(cloud, indices, searchK, searchRadius, viewPoint);
2905 }
2906 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2907  const pcl::PointCloud<pcl::PointXYZI>::Ptr & cloud,
2908  const pcl::IndicesPtr & indices,
2909  int searchK,
2910  float searchRadius,
2911  const Eigen::Vector3f & viewPoint)
2912 {
2913  return computeNormalsImpl<pcl::PointXYZI>(cloud, indices, searchK, searchRadius, viewPoint);
2914 }
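// Usage sketch (added for documentation; the cloud content and parameter values
// are hypothetical):
//   pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
//   // ... fill cloud ...
//   pcl::PointCloud<pcl::Normal>::Ptr normals =
//       rtabmap::util3d::computeNormals(cloud, 20, 0.0f, Eigen::Vector3f(0, 0, 0));
//   // With searchRadius=0 a pure k-nearest-neighbor search is used; the output
//   // always has one normal per input point since the indices are intentionally
//   // not forwarded to the estimator (see the comment in computeNormalsImpl()).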
2915 
2916 template<typename PointT>
2917 pcl::PointCloud<pcl::Normal>::Ptr computeNormals2DImpl(
2918  const typename pcl::PointCloud<PointT>::Ptr & cloud,
2919  int searchK,
2920  float searchRadius,
2921  const Eigen::Vector3f & viewPoint)
2922 {
2923  UASSERT(searchK>0 || searchRadius>0.0f);
2924  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
2925 
2926  typename pcl::search::KdTree<PointT>::Ptr tree (new pcl::search::KdTree<PointT>);
2927  tree->setInputCloud (cloud);
2928 
2929  normals->resize(cloud->size());
2930 
2931  float bad_point = std::numeric_limits<float>::quiet_NaN ();
2932 
2933  // assuming that points are ordered
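// Added note (not in the original source): for each point p, a candidate
// normal is built from every neighbor q as n = (v x d) x v, with v = q - p and
// d = viewPoint - p; n is perpendicular to the local scan direction v, lies in
// the plane spanned by v and d, and has a non-negative component along d (it
// points toward the view point). The candidates are then averaged and
// normalized; points without neighbors get NaN normals.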
2934  for(unsigned int i=0; i<cloud->size(); ++i)
2935  {
2936  const PointT & pt = cloud->at(i);
2937  std::vector<Eigen::Vector3f> neighborNormals;
2938  Eigen::Vector3f direction;
2939  direction[0] = viewPoint[0] - pt.x;
2940  direction[1] = viewPoint[1] - pt.y;
2941  direction[2] = viewPoint[2] - pt.z;
2942 
2943  std::vector<int> k_indices;
2944  std::vector<float> k_sqr_distances;
2945  if(searchRadius>0.0f)
2946  {
2947  tree->radiusSearch(cloud->at(i), searchRadius, k_indices, k_sqr_distances, searchK);
2948  }
2949  else
2950  {
2951  tree->nearestKSearch(cloud->at(i), searchK, k_indices, k_sqr_distances);
2952  }
2953 
2954  for(unsigned int j=0; j<k_indices.size(); ++j)
2955  {
2956  if(k_indices.at(j) != (int)i)
2957  {
2958  const PointT & pt2 = cloud->at(k_indices.at(j));
2959  Eigen::Vector3f v(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
2960  Eigen::Vector3f up = v.cross(direction);
2961  Eigen::Vector3f n = up.cross(v);
2962  n.normalize();
2963  neighborNormals.push_back(n);
2964  }
2965  }
2966 
2967  if(neighborNormals.empty())
2968  {
2969  normals->at(i).normal_x = bad_point;
2970  normals->at(i).normal_y = bad_point;
2971  normals->at(i).normal_z = bad_point;
2972  }
2973  else
2974  {
2975  Eigen::Vector3f meanNormal(0,0,0);
2976  for(unsigned int j=0; j<neighborNormals.size(); ++j)
2977  {
2978  meanNormal+=neighborNormals[j];
2979  }
2980  meanNormal /= (float)neighborNormals.size();
2981  meanNormal.normalize();
2982  normals->at(i).normal_x = meanNormal[0];
2983  normals->at(i).normal_y = meanNormal[1];
2984  normals->at(i).normal_z = meanNormal[2];
2985  }
2986  }
2987 
2988  return normals;
2989 }
2990 pcl::PointCloud<pcl::Normal>::Ptr computeNormals2D(
2991  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
2992  int searchK,
2993  float searchRadius,
2994  const Eigen::Vector3f & viewPoint)
2995 {
2996  return computeNormals2DImpl<pcl::PointXYZ>(cloud, searchK, searchRadius, viewPoint);
2997 }
2998 pcl::PointCloud<pcl::Normal>::Ptr computeNormals2D(
2999  const pcl::PointCloud<pcl::PointXYZI>::Ptr & cloud,
3000  int searchK,
3001  float searchRadius,
3002  const Eigen::Vector3f & viewPoint)
3003 {
3004  return computeNormals2DImpl<pcl::PointXYZI>(cloud, searchK, searchRadius, viewPoint);
3005 }
3006 
3007 template<typename PointT>
3008 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals2DImpl(
3009  const typename pcl::PointCloud<PointT>::Ptr & cloud,
3010  int searchK,
3011  float searchRadius,
3012  const Eigen::Vector3f & viewPoint)
3013 {
3014  UASSERT(searchK>0);
3015  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
3016 
3017  normals->resize(cloud->size());
3018  searchRadius *= searchRadius; // squared distance
3019 
3020  float bad_point = std::numeric_limits<float>::quiet_NaN ();
3021 
3022  // assuming that points are ordered
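// Added note (not in the original source): unlike computeNormals2D() above, no
// kd-tree is used; the scan ordering is exploited directly by visiting at most
// searchK indices backward and forward from the current point and stopping as
// soon as a neighbor is farther than searchRadius (compared squared above),
// which keeps the cost linear in the number of points.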
3023  for(int i=0; i<(int)cloud->size(); ++i)
3024  {
3025  int li = i-searchK;
3026  if(li<0)
3027  {
3028  li=0;
3029  }
3030  int hi = i+searchK;
3031  if(hi>=(int)cloud->size())
3032  {
3033  hi=(int)cloud->size()-1;
3034  }
3035 
3036  // gather the preceding points that are not too far away
3037  const PointT & pt = cloud->at(i);
3038  std::vector<Eigen::Vector3f> neighborNormals;
3039  Eigen::Vector3f direction;
3040  direction[0] = viewPoint[0] - cloud->at(i).x;
3041  direction[1] = viewPoint[1] - cloud->at(i).y;
3042  direction[2] = viewPoint[2] - cloud->at(i).z;
3043  for(int j=i-1; j>=li; --j)
3044  {
3045  const PointT & pt2 = cloud->at(j);
3046  Eigen::Vector3f vd(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
3047  if(searchRadius<=0.0f || (vd[0]*vd[0] + vd[1]*vd[1] + vd[2]*vd[2]) < searchRadius)
3048  {
3049  Eigen::Vector3f v(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
3050  Eigen::Vector3f up = v.cross(direction);
3051  Eigen::Vector3f n = up.cross(v);
3052  n.normalize();
3053  neighborNormals.push_back(n);
3054  }
3055  else
3056  {
3057  break;
3058  }
3059  }
3060  for(int j=i+1; j<=hi; ++j)
3061  {
3062  const PointT & pt2 = cloud->at(j);
3063  Eigen::Vector3f vd(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
3064  if(searchRadius<=0.0f || (vd[0]*vd[0] + vd[1]*vd[1] + vd[2]*vd[2]) < searchRadius)
3065  {
3066  Eigen::Vector3f v(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
3067  Eigen::Vector3f up = v[2]==0.0f?Eigen::Vector3f(0,0,1):v.cross(direction);
3068  Eigen::Vector3f n = up.cross(v);
3069  n.normalize();
3070  neighborNormals.push_back(n);
3071  }
3072  else
3073  {
3074  break;
3075  }
3076  }
3077 
3078  if(neighborNormals.empty())
3079  {
3080  normals->at(i).normal_x = bad_point;
3081  normals->at(i).normal_y = bad_point;
3082  normals->at(i).normal_z = bad_point;
3083  }
3084  else
3085  {
3086  Eigen::Vector3f meanNormal(0,0,0);
3087  for(unsigned int j=0; j<neighborNormals.size(); ++j)
3088  {
3089  meanNormal+=neighborNormals[j];
3090  }
3091  meanNormal /= (float)neighborNormals.size();
3092  meanNormal.normalize();
3093  normals->at(i).normal_x = meanNormal[0];
3094  normals->at(i).normal_y = meanNormal[1];
3095  normals->at(i).normal_z = meanNormal[2];
3096  }
3097  }
3098 
3099  return normals;
3100 }
3101 
3102 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals2D(
3103  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
3104  int searchK,
3105  float searchRadius,
3106  const Eigen::Vector3f & viewPoint)
3107 {
3108  return computeFastOrganizedNormals2DImpl<pcl::PointXYZ>(cloud, searchK, searchRadius, viewPoint);
3109 }
3110 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals2D(
3111  const pcl::PointCloud<pcl::PointXYZI>::Ptr & cloud,
3112  int searchK,
3113  float searchRadius,
3114  const Eigen::Vector3f & viewPoint)
3115 {
3116  return computeFastOrganizedNormals2DImpl<pcl::PointXYZI>(cloud, searchK, searchRadius, viewPoint);
3117 }
3118 
3119 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals(
3120  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
3121  float maxDepthChangeFactor,
3122  float normalSmoothingSize,
3123  const Eigen::Vector3f & viewPoint)
3124 {
3125  pcl::IndicesPtr indices(new std::vector<int>);
3126  return computeFastOrganizedNormals(cloud, indices, maxDepthChangeFactor, normalSmoothingSize, viewPoint);
3127 }
3128 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals(
3129  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
3130  const pcl::IndicesPtr & indices,
3131  float maxDepthChangeFactor,
3132  float normalSmoothingSize,
3133  const Eigen::Vector3f & viewPoint)
3134 {
3135  UASSERT(cloud->isOrganized());
3136 
3137  pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB>);
3138  if(indices->size())
3139  {
3140  tree->setInputCloud(cloud, indices);
3141  }
3142  else
3143  {
3144  tree->setInputCloud (cloud);
3145  }
3146 
3147  // Normal estimation
3148  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
3149  pcl::IntegralImageNormalEstimation<pcl::PointXYZRGB, pcl::Normal> ne;
3150  ne.setNormalEstimationMethod (ne.AVERAGE_3D_GRADIENT);
3151  ne.setMaxDepthChangeFactor(maxDepthChangeFactor);
3152  ne.setNormalSmoothingSize(normalSmoothingSize);
3153  ne.setBorderPolicy(ne.BORDER_POLICY_MIRROR);
3154  ne.setInputCloud(cloud);
3155  // Commented: Keep the output normals size the same as the input cloud
3156  //if(indices->size())
3157  //{
3158  // ne.setIndices(indices);
3159  //}
3160  ne.setSearchMethod(tree);
3161  ne.setViewPoint(viewPoint[0], viewPoint[1], viewPoint[2]);
3162  ne.compute(*normals);
3163 
3164  return normals;
3165 }
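// Usage sketch (added for documentation; the cloud variable and parameter
// values are hypothetical). The input must be an organized cloud, e.g. one
// built from a registered depth image:
//   pcl::PointCloud<pcl::Normal>::Ptr normals =
//       rtabmap::util3d::computeFastOrganizedNormals(
//           cloud, 0.02f, 10.0f, Eigen::Vector3f(0, 0, 0));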
3166 
3167 float computeNormalsComplexity(
3168  const LaserScan & scan,
3169  const Transform & t,
3170  cv::Mat * pcaEigenVectors,
3171  cv::Mat * pcaEigenValues)
3172 {
3173  if(!scan.isEmpty() && (scan.hasNormals()))
3174  {
3175  //Construct a buffer used by the pca analysis
3176  int sz = static_cast<int>(scan.size()*2);
3177  bool is2d = scan.is2d();
3178  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
3179  int oi = 0;
3180  int nOffset = scan.getNormalsOffset();
3181  bool doTransform = false;
3182  Transform tn;
3183  if(!t.isIdentity() || !scan.localTransform().isIdentity())
3184  {
3185  tn = (t*scan.localTransform()).rotation();
3186  doTransform = true;
3187  }
3188  for (int i = 0; i < scan.size(); ++i)
3189  {
3190  const float * ptrScan = scan.data().ptr<float>(0, i);
3191 
3192  if(is2d)
3193  {
3194  if(uIsFinite(ptrScan[nOffset]) && uIsFinite(ptrScan[nOffset+1]))
3195  {
3196  cv::Point3f n(ptrScan[nOffset], ptrScan[nOffset+1], 0);
3197  if(doTransform)
3198  {
3199  n = util3d::transformPoint(n, tn);
3200  }
3201  float * ptr = data_normals.ptr<float>(oi++, 0);
3202  ptr[0] = n.x;
3203  ptr[1] = n.y;
3204  }
3205  }
3206  else
3207  {
3208  if(uIsFinite(ptrScan[nOffset]) && uIsFinite(ptrScan[nOffset+1]) && uIsFinite(ptrScan[nOffset+2]))
3209  {
3210  cv::Point3f n(ptrScan[nOffset], ptrScan[nOffset+1], ptrScan[nOffset+2]);
3211  if(doTransform)
3212  {
3213  n = util3d::transformPoint(n, tn);
3214  }
3215  float * ptr = data_normals.ptr<float>(oi++, 0);
3216  ptr[0] = n.x;
3217  ptr[1] = n.y;
3218  ptr[2] = n.z;
3219  }
3220  }
3221  }
3222  if(oi>1)
3223  {
3224  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3225 
3226  if(pcaEigenVectors)
3227  {
3228  *pcaEigenVectors = pca_analysis.eigenvectors;
3229  }
3230  if(pcaEigenValues)
3231  {
3232  *pcaEigenValues = pca_analysis.eigenvalues;
3233  }
3234  UASSERT((is2d && pca_analysis.eigenvalues.total()>=2) || (!is2d && pca_analysis.eigenvalues.total()>=3));
3235  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
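// Added note (not in the original source): the smallest eigenvalue measures
// how spread the normal directions are: it is close to 0 when all normals are
// (anti)parallel (e.g. a scan of a single flat surface, which leaves
// registration under-constrained along that surface) and grows when the
// normals span several directions.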
3236  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3237  }
3238  }
3239  else if(!scan.isEmpty())
3240  {
3241  UERROR("Scan doesn't have normals!");
3242  }
3243  return 0.0f;
3244 }
3245 
3246 float computeNormalsComplexity(
3247  const pcl::PointCloud<pcl::PointNormal> & cloud,
3248  const Transform & t,
3249  bool is2d,
3250  cv::Mat * pcaEigenVectors,
3251  cv::Mat * pcaEigenValues)
3252 {
3253  //Construct a buffer used by the pca analysis
3254  int sz = static_cast<int>(cloud.size()*2);
3255  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
3256  int oi = 0;
3257  bool doTransform = false;
3258  Transform tn;
3259  if(!t.isIdentity() && !t.isNull())
3260  {
3261  tn = t.rotation();
3262  doTransform = true;
3263  }
3264  for (unsigned int i = 0; i < cloud.size(); ++i)
3265  {
3266  const pcl::PointNormal & pt = cloud.at(i);
3267  cv::Point3f n(pt.normal_x, pt.normal_y, pt.normal_z);
3268  if(doTransform)
3269  {
3270  n = util3d::transformPoint(n, tn);
3271  }
3272  if(uIsFinite(pt.normal_x) && uIsFinite(pt.normal_y) && uIsFinite(pt.normal_z))
3273  {
3274  float * ptr = data_normals.ptr<float>(oi++, 0);
3275  ptr[0] = n.x;
3276  ptr[1] = n.y;
3277  if(!is2d)
3278  {
3279  ptr[2] = n.z;
3280  }
3281  }
3282  }
3283  if(oi>1)
3284  {
3285  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3286 
3287  if(pcaEigenVectors)
3288  {
3289  *pcaEigenVectors = pca_analysis.eigenvectors;
3290  }
3291  if(pcaEigenValues)
3292  {
3293  *pcaEigenValues = pca_analysis.eigenvalues;
3294  }
3295 
3296  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
3297  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3298  }
3299  return 0.0f;
3300 }
3301 
3302 float computeNormalsComplexity(
3303  const pcl::PointCloud<pcl::Normal> & normals,
3304  const Transform & t,
3305  bool is2d,
3306  cv::Mat * pcaEigenVectors,
3307  cv::Mat * pcaEigenValues)
3308 {
3309  //Construct a buffer used by the pca analysis
3310  int sz = static_cast<int>(normals.size()*2);
3311  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
3312  int oi = 0;
3313  bool doTransform = false;
3314  Transform tn;
3315  if(!t.isIdentity())
3316  {
3317  tn = t.rotation();
3318  doTransform = true;
3319  }
3320  for (unsigned int i = 0; i < normals.size(); ++i)
3321  {
3322  const pcl::Normal & pt = normals.at(i);
3323  cv::Point3f n(pt.normal_x, pt.normal_y, pt.normal_z);
3324  if(doTransform)
3325  {
3326  n = util3d::transformPoint(n, tn);
3327  }
3328  if(uIsFinite(pt.normal_x) && uIsFinite(pt.normal_y) && uIsFinite(pt.normal_z))
3329  {
3330  float * ptr = data_normals.ptr<float>(oi++, 0);
3331  ptr[0] = n.x;
3332  ptr[1] = n.y;
3333  if(!is2d)
3334  {
3335  ptr[2] = n.z;
3336  }
3337  }
3338  }
3339  if(oi>1)
3340  {
3341  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3342 
3343  if(pcaEigenVectors)
3344  {
3345  *pcaEigenVectors = pca_analysis.eigenvectors;
3346  }
3347  if(pcaEigenValues)
3348  {
3349  *pcaEigenValues = pca_analysis.eigenvalues;
3350  }
3351 
3352  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
3353  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3354  }
3355  return 0.0f;
3356 }
3357 
3358 float computeNormalsComplexity(
3359  const pcl::PointCloud<pcl::PointXYZINormal> & cloud,
3360  const Transform & t,
3361  bool is2d,
3362  cv::Mat * pcaEigenVectors,
3363  cv::Mat * pcaEigenValues)
3364 {
3365  //Construct a buffer used by the pca analysis
3366  int sz = static_cast<int>(cloud.size()*2);
3367  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
3368  int oi = 0;
3369  bool doTransform = false;
3370  Transform tn;
3371  if(!t.isIdentity())
3372  {
3373  tn = t.rotation();
3374  doTransform = true;
3375  }
3376  for (unsigned int i = 0; i < cloud.size(); ++i)
3377  {
3378  const pcl::PointXYZINormal & pt = cloud.at(i);
3379  cv::Point3f n(pt.normal_x, pt.normal_y, pt.normal_z);
3380  if(doTransform)
3381  {
3382  n = util3d::transformPoint(n, tn);
3383  }
3384  if(uIsFinite(pt.normal_x) && uIsFinite(pt.normal_y) && uIsFinite(pt.normal_z))
3385  {
3386  float * ptr = data_normals.ptr<float>(oi++, 0);
3387  ptr[0] = n.x;
3388  ptr[1] = n.y;
3389  if(!is2d)
3390  {
3391  ptr[2] = n.z;
3392  }
3393  }
3394  }
3395  if(oi>1)
3396  {
3397  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3398 
3399  if(pcaEigenVectors)
3400  {
3401  *pcaEigenVectors = pca_analysis.eigenvectors;
3402  }
3403  if(pcaEigenValues)
3404  {
3405  *pcaEigenValues = pca_analysis.eigenvalues;
3406  }
3407 
3408  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
3409  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3410  }
3411  return 0.0f;
3412 }
3413 
3414 float computeNormalsComplexity(
3415  const pcl::PointCloud<pcl::PointXYZRGBNormal> & cloud,
3416  const Transform & t,
3417  bool is2d,
3418  cv::Mat * pcaEigenVectors,
3419  cv::Mat * pcaEigenValues)
3420 {
3421  //Construct a buffer used by the pca analysis
3422  int sz = static_cast<int>(cloud.size()*2);
3423  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
3424  int oi = 0;
3425  bool doTransform = false;
3426  Transform tn;
3427  if(!t.isIdentity())
3428  {
3429  tn = t.rotation();
3430  doTransform = true;
3431  }
3432  for (unsigned int i = 0; i < cloud.size(); ++i)
3433  {
3434  const pcl::PointXYZRGBNormal & pt = cloud.at(i);
3435  cv::Point3f n(pt.normal_x, pt.normal_y, pt.normal_z);
3436  if(doTransform)
3437  {
3438  n = util3d::transformPoint(n, tn);
3439  }
3440  if(uIsFinite(pt.normal_x) && uIsFinite(pt.normal_y) && uIsFinite(pt.normal_z))
3441  {
3442  float * ptr = data_normals.ptr<float>(oi++, 0);
3443  ptr[0] = n.x;
3444  ptr[1] = n.y;
3445  if(!is2d)
3446  {
3447  ptr[2] = n.z;
3448  }
3449  }
3450  }
3451  if(oi>1)
3452  {
3453  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3454 
3455  if(pcaEigenVectors)
3456  {
3457  *pcaEigenVectors = pca_analysis.eigenvectors;
3458  }
3459  if(pcaEigenValues)
3460  {
3461  *pcaEigenValues = pca_analysis.eigenvalues;
3462  }
3463 
3464  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
3465  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3466  }
3467  return 0.0f;
3468 }
3469 
3470 pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr mls(
3471  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
3472  float searchRadius,
3473  int polygonialOrder,
3474  int upsamplingMethod, // NONE, DISTINCT_CLOUD, SAMPLE_LOCAL_PLANE, RANDOM_UNIFORM_DENSITY, VOXEL_GRID_DILATION
3475  float upsamplingRadius, // SAMPLE_LOCAL_PLANE
3476  float upsamplingStep, // SAMPLE_LOCAL_PLANE
3477  int pointDensity, // RANDOM_UNIFORM_DENSITY
3478  float dilationVoxelSize, // VOXEL_GRID_DILATION
3479  int dilationIterations) // VOXEL_GRID_DILATION
3480 {
3481  pcl::IndicesPtr indices(new std::vector<int>);
3482  return mls(cloud,
3483  indices,
3484  searchRadius,
3485  polygonialOrder,
3486  upsamplingMethod,
3487  upsamplingRadius,
3488  upsamplingStep,
3489  pointDensity,
3490  dilationVoxelSize,
3491  dilationIterations);
3492 }
3493 
3494 pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr mls(
3495  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
3496  const pcl::IndicesPtr & indices,
3497  float searchRadius,
3498  int polygonialOrder,
3499  int upsamplingMethod, // NONE, DISTINCT_CLOUD, SAMPLE_LOCAL_PLANE, RANDOM_UNIFORM_DENSITY, VOXEL_GRID_DILATION
3500  float upsamplingRadius, // SAMPLE_LOCAL_PLANE
3501  float upsamplingStep, // SAMPLE_LOCAL_PLANE
3502  int pointDensity, // RANDOM_UNIFORM_DENSITY
3503  float dilationVoxelSize, // VOXEL_GRID_DILATION
3504  int dilationIterations) // VOXEL_GRID_DILATION
3505 {
3506  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud_with_normals(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
3507  pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB>);
3508  if(indices->size())
3509  {
3510  tree->setInputCloud (cloud, indices);
3511  }
3512  else
3513  {
3514  tree->setInputCloud (cloud);
3515  }
3516 
3517  // Init object (second point type is for the normals)
3518  pcl::MovingLeastSquares<pcl::PointXYZRGB, pcl::PointXYZRGBNormal> mls;
3519 
3520  // Set parameters
3521  mls.setComputeNormals (true);
3522  if(polygonialOrder > 0)
3523  {
3524 #if PCL_VERSION_COMPARE(<, 1, 10, 0)
3525  mls.setPolynomialFit (true);
3526 #endif
3527  mls.setPolynomialOrder(polygonialOrder);
3528  }
3529  else
3530  {
3531 #if PCL_VERSION_COMPARE(<, 1, 10, 0)
3532  mls.setPolynomialFit (false);
3533 #else
3534  mls.setPolynomialOrder(1);
3535 #endif
3536  }
3537  UASSERT(upsamplingMethod >= mls.NONE &&
3538  upsamplingMethod <= mls.VOXEL_GRID_DILATION);
3539  mls.setUpsamplingMethod((pcl::MovingLeastSquares<pcl::PointXYZRGB, pcl::PointXYZRGBNormal>::UpsamplingMethod)upsamplingMethod);
3540  mls.setSearchRadius(searchRadius);
3541  mls.setUpsamplingRadius(upsamplingRadius);
3542  mls.setUpsamplingStepSize(upsamplingStep);
3543  mls.setPointDensity(pointDensity);
3544  mls.setDilationVoxelSize(dilationVoxelSize);
3545  mls.setDilationIterations(dilationIterations);
3546 
3547  // Reconstruct
3548  mls.setInputCloud (cloud);
3549  if(indices->size())
3550  {
3551  mls.setIndices(indices);
3552  }
3553  mls.setSearchMethod (tree);
3554  mls.process (*cloud_with_normals);
3555 
3556  // It seems that returned normals are not normalized!? FIXME: Is it a bug only in PCL 1.7.1?
3557  for(unsigned int i=0; i<cloud_with_normals->size(); ++i)
3558  {
3559  Eigen::Vector3f normal(cloud_with_normals->at(i).normal_x, cloud_with_normals->at(i).normal_y, cloud_with_normals->at(i).normal_z);
3560  normal.normalize();
3561  cloud_with_normals->at(i).normal_x = normal[0];
3562  cloud_with_normals->at(i).normal_y = normal[1];
3563  cloud_with_normals->at(i).normal_z = normal[2];
3564  }
3565 
3566  return cloud_with_normals;
3567 }
3568 
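// Illustrative usage sketch for the MLS smoothing above, with every parameter
// passed explicitly to match the overload defined here. It assumes `cloud` is
// an unorganized colored cloud whose point spacing is well below the 4 cm
// search radius; the helper name and values are examples only.
static pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr exampleMlsSmoothing(
    const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud)
{
    return util3d::mls(
        cloud,
        0.04f, // searchRadius
        2,     // polygonialOrder (quadratic fit)
        0,     // upsamplingMethod = NONE
        0.0f,  // upsamplingRadius (SAMPLE_LOCAL_PLANE only)
        0.0f,  // upsamplingStep (SAMPLE_LOCAL_PLANE only)
        0,     // pointDensity (RANDOM_UNIFORM_DENSITY only)
        1.0f,  // dilationVoxelSize (VOXEL_GRID_DILATION only)
        0);    // dilationIterations (VOXEL_GRID_DILATION only)
}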
3569 LaserScan adjustNormalsToViewPoint(
3570  const LaserScan & scan,
3571  const Eigen::Vector3f & viewpoint,
3572  bool forceGroundNormalsUp)
3573 {
3574  return adjustNormalsToViewPoint(scan, viewpoint, forceGroundNormalsUp?0.8f:0.0f);
3575 }
3576 LaserScan adjustNormalsToViewPoint(
3577  const LaserScan & scan,
3578  const Eigen::Vector3f & viewpoint,
3579  float groundNormalsUp)
3580 {
3581  if(scan.size() && !scan.is2d() && scan.hasNormals())
3582  {
3583  int nx = scan.getNormalsOffset();
3584  int ny = nx+1;
3585  int nz = ny+1;
3586  cv::Mat output = scan.data().clone();
3587  #pragma omp parallel for
3588  for(int j=0; j<scan.data().rows; ++j)
3589  {
3590  for(int i=0; i<scan.data().cols; ++i)
3591  {
3592  float * ptr = output.ptr<float>(j, i);
3593  if(uIsFinite(ptr[nx]) && uIsFinite(ptr[ny]) && uIsFinite(ptr[nz]))
3594  {
3595  Eigen::Vector3f v = viewpoint - Eigen::Vector3f(ptr[0], ptr[1], ptr[2]);
3596  Eigen::Vector3f n(ptr[nx], ptr[ny], ptr[nz]);
3597 
3598  float result = v.dot(n);
3599  if(result < 0
3600  || (groundNormalsUp>0.0f && ptr[nz] < -groundNormalsUp && ptr[2] < viewpoint[2])) // some far velodyne rays on road can have normals toward ground
3601  {
3602  //reverse normal
3603  ptr[nx] *= -1.0f;
3604  ptr[ny] *= -1.0f;
3605  ptr[nz] *= -1.0f;
3606  }
3607  }
3608  }
3609  }
3610  if(scan.angleIncrement() > 0.0f)
3611  {
3612  return LaserScan(output, scan.format(), scan.rangeMin(), scan.rangeMax(), scan.angleMin(), scan.angleMax(), scan.angleIncrement(), scan.localTransform());
3613  }
3614  else
3615  {
3616  return LaserScan(output, scan.maxPoints(), scan.rangeMax(), scan.format(), scan.localTransform());
3617  }
3618  }
3619  return scan;
3620 }
3621 
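// Illustrative usage sketch: re-orienting scan normals toward the sensor. It
// assumes `scan` holds 3D points with normals expressed in the same frame as
// `viewpoint` (typically the sensor origin); the helper name is an example.
static LaserScan exampleOrientScanNormals(const LaserScan & scan,
    const Eigen::Vector3f & viewpoint)
{
    // 0.8f reproduces forceGroundNormalsUp=true of the overload above;
    // pass 0.0f to disable the ground-normal heuristic.
    return util3d::adjustNormalsToViewPoint(scan, viewpoint, 0.8f);
}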
3622 template<typename PointNormalT>
3623 void adjustNormalsToViewPointImpl(
3624  typename pcl::PointCloud<PointNormalT>::Ptr & cloud,
3625  const Eigen::Vector3f & viewpoint,
3626  float groundNormalsUp)
3627 {
3628  #pragma omp parallel for
3629  for(int i=0; i<(int)cloud->size(); ++i)
3630  {
3631  pcl::PointXYZ normal(cloud->points[i].normal_x, cloud->points[i].normal_y, cloud->points[i].normal_z);
3632  if(pcl::isFinite(normal))
3633  {
3634  Eigen::Vector3f v = viewpoint - cloud->points[i].getVector3fMap();
3635  Eigen::Vector3f n(normal.x, normal.y, normal.z);
3636 
3637  float result = v.dot(n);
3638  if(result < 0
3639  || (groundNormalsUp>0.0f && normal.z < -groundNormalsUp && cloud->points[i].z < viewpoint[2])) // some far velodyne rays on road can have normals toward ground
3640  {
3641  //reverse normal
3642  cloud->points[i].normal_x *= -1.0f;
3643  cloud->points[i].normal_y *= -1.0f;
3644  cloud->points[i].normal_z *= -1.0f;
3645  }
3646  }
3647  }
3648 }
3649 
3650 void adjustNormalsToViewPoint(
3651  pcl::PointCloud<pcl::PointNormal>::Ptr & cloud,
3652  const Eigen::Vector3f & viewpoint,
3653  bool forceGroundNormalsUp)
3654 {
3655  adjustNormalsToViewPoint(cloud, viewpoint, forceGroundNormalsUp?0.8f:0.0f);
3656 }
3657 void adjustNormalsToViewPoint(
3658  pcl::PointCloud<pcl::PointNormal>::Ptr & cloud,
3659  const Eigen::Vector3f & viewpoint,
3660  float groundNormalsUp)
3661 {
3662  adjustNormalsToViewPointImpl<pcl::PointNormal>(cloud, viewpoint, groundNormalsUp);
3663 }
3664 
3665 void adjustNormalsToViewPoint(
3666  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloud,
3667  const Eigen::Vector3f & viewpoint,
3668  bool forceGroundNormalsUp)
3669 {
3670  adjustNormalsToViewPoint(cloud, viewpoint, forceGroundNormalsUp?0.8f:0.0f);
3671 }
3672 void adjustNormalsToViewPoint(
3673  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloud,
3674  const Eigen::Vector3f & viewpoint,
3675  float groundNormalsUp)
3676 {
3677  adjustNormalsToViewPointImpl<pcl::PointXYZRGBNormal>(cloud, viewpoint, groundNormalsUp);
3678 }
3679 
3680 void adjustNormalsToViewPoint(
3681  pcl::PointCloud<pcl::PointXYZINormal>::Ptr & cloud,
3682  const Eigen::Vector3f & viewpoint,
3683  bool forceGroundNormalsUp)
3684 {
3685  adjustNormalsToViewPoint(cloud, viewpoint, forceGroundNormalsUp?0.8f:0.0f);
3686 }
3687 void adjustNormalsToViewPoint(
3688  pcl::PointCloud<pcl::PointXYZINormal>::Ptr & cloud,
3689  const Eigen::Vector3f & viewpoint,
3690  float groundNormalsUp)
3691 {
3692  adjustNormalsToViewPointImpl<pcl::PointXYZINormal>(cloud, viewpoint, groundNormalsUp);
3693 }
3694 
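// Illustrative usage sketch for the in-place cloud overloads above. It assumes
// `cloudWithNormals` is expressed in the same frame as `viewpoint`; the helper
// name is an example.
static void exampleOrientCloudNormals(
    pcl::PointCloud<pcl::PointNormal>::Ptr & cloudWithNormals,
    const Eigen::Vector3f & viewpoint)
{
    // Flips any normal pointing away from the viewpoint; 0.0f disables the
    // extra "ground normals up" heuristic.
    util3d::adjustNormalsToViewPoint(cloudWithNormals, viewpoint, 0.0f);
}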
3695 template<typename PointT>
3696 void adjustNormalsToViewPointsImpl(
3697  const std::map<int, Transform> & poses,
3698  const std::vector<int> & cameraIndices,
3699  typename pcl::PointCloud<PointT>::Ptr & cloud,
3700  float groundNormalsUp)
3701 {
3702  if(poses.size() && cloud->size() == cameraIndices.size() && cloud->size())
3703  {
3704  #pragma omp parallel for
3705  for(int i=0; i<(int)cloud->size(); ++i)
3706  {
3707  pcl::PointXYZ normal(cloud->points[i].normal_x, cloud->points[i].normal_y, cloud->points[i].normal_z);
3708  if(pcl::isFinite(normal))
3709  {
3710  const Transform & p = poses.at(cameraIndices[i]);
3711  pcl::PointXYZ viewpoint(p.x(), p.y(), p.z());
3712  Eigen::Vector3f v = viewpoint.getVector3fMap() - cloud->points[i].getVector3fMap();
3713 
3714  Eigen::Vector3f n(normal.x, normal.y, normal.z);
3715 
3716  float result = v.dot(n);
3717  if(result < 0 ||
3718  (groundNormalsUp>0.0f && normal.z < -groundNormalsUp && cloud->points[i].z < viewpoint.z)) // some far velodyne rays on road can have normals toward ground
3719  {
3720  //reverse normal
3721  cloud->points[i].normal_x *= -1.0f;
3722  cloud->points[i].normal_y *= -1.0f;
3723  cloud->points[i].normal_z *= -1.0f;
3724  }
3725  }
3726  }
3727  }
3728 }
3729 
3730 void adjustNormalsToViewPoints(
3731  const std::map<int, Transform> & poses,
3732  const std::vector<int> & cameraIndices,
3733  pcl::PointCloud<pcl::PointNormal>::Ptr & cloud,
3734  float groundNormalsUp)
3735 {
3736  adjustNormalsToViewPointsImpl<pcl::PointNormal>(poses, cameraIndices, cloud, groundNormalsUp);
3737 }
3738 
3739 void adjustNormalsToViewPoints(
3740  const std::map<int, Transform> & poses,
3741  const std::vector<int> & cameraIndices,
3742  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloud,
3743  float groundNormalsUp)
3744 {
3745  adjustNormalsToViewPointsImpl<pcl::PointXYZRGBNormal>(poses, cameraIndices, cloud, groundNormalsUp);
3746 }
3747 
3748 void adjustNormalsToViewPoints(
3749  const std::map<int, Transform> & poses,
3750  const std::vector<int> & cameraIndices,
3751  pcl::PointCloud<pcl::PointXYZINormal>::Ptr & cloud,
3752  float groundNormalsUp)
3753 {
3754  adjustNormalsToViewPointsImpl<pcl::PointXYZINormal>(poses, cameraIndices, cloud, groundNormalsUp);
3755 }
3756 
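// Illustrative usage sketch for the per-point viewpoint overloads above. It
// assumes `assembled` was built by appending the clouds of several nodes,
// `cameraIndices[i]` stores the node id that produced point i, and `poses`
// maps each of those ids to the corresponding camera pose; the helper name is
// an example.
static void exampleOrientAssembledCloud(
    const std::map<int, Transform> & poses,
    const std::vector<int> & cameraIndices,
    pcl::PointCloud<pcl::PointNormal>::Ptr & assembled)
{
    util3d::adjustNormalsToViewPoints(poses, cameraIndices, assembled, 0.8f);
}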
3757 template<typename PointT>
3758 void adjustNormalsToViewPointsImpl(
3759  const std::map<int, Transform> & poses,
3760  const pcl::PointCloud<pcl::PointXYZ>::Ptr & rawCloud,
3761  const std::vector<int> & rawCameraIndices,
3762  typename pcl::PointCloud<PointT>::Ptr & cloud,
3763  float groundNormalsUp)
3764 {
3765  if(poses.size() && rawCloud->size() && rawCloud->size() == rawCameraIndices.size() && cloud->size())
3766  {
3767  pcl::search::KdTree<pcl::PointXYZ>::Ptr rawTree (new pcl::search::KdTree<pcl::PointXYZ>);
3768  rawTree->setInputCloud (rawCloud);
3769 
3770  #pragma omp parallel for
3771  for(int i=0; i<(int)cloud->size(); ++i)
3772  {
3773  pcl::PointXYZ normal(cloud->points[i].normal_x, cloud->points[i].normal_y, cloud->points[i].normal_z);
3774  if(pcl::isFinite(normal))
3775  {
3776  std::vector<int> indices;
3777  std::vector<float> dist;
3778  rawTree->nearestKSearch(pcl::PointXYZ(cloud->points[i].x, cloud->points[i].y, cloud->points[i].z), 1, indices, dist);
3779  UASSERT(indices.size() == 1);
3780  if(indices.size() && indices[0]>=0)
3781  {
3782  const Transform & p = poses.at(rawCameraIndices[indices[0]]);
3783  pcl::PointXYZ viewpoint(p.x(), p.y(), p.z());
3784  Eigen::Vector3f v = viewpoint.getVector3fMap() - cloud->points[i].getVector3fMap();
3785 
3786  Eigen::Vector3f n(normal.x, normal.y, normal.z);
3787 
3788  float result = v.dot(n);
3789  if(result < 0 ||
3790  (groundNormalsUp>0.0f && normal.z < -groundNormalsUp && cloud->points[i].z < viewpoint.z)) // some far velodyne rays on road can have normals toward ground
3791  {
3792  //reverse normal
3793  cloud->points[i].normal_x *= -1.0f;
3794  cloud->points[i].normal_y *= -1.0f;
3795  cloud->points[i].normal_z *= -1.0f;
3796  }
3797  }
3798  else
3799  {
3800  UWARN("No camera viewpoint found for point %d", i);
3801  }
3802  }
3803  }
3804  }
3805 }
3806 
3807 void adjustNormalsToViewPoints(
3808  const std::map<int, Transform> & poses,
3809  const pcl::PointCloud<pcl::PointXYZ>::Ptr & rawCloud,
3810  const std::vector<int> & rawCameraIndices,
3811  pcl::PointCloud<pcl::PointNormal>::Ptr & cloud,
3812  float groundNormalsUp)
3813 {
3814  adjustNormalsToViewPointsImpl<pcl::PointNormal>(poses, rawCloud, rawCameraIndices, cloud, groundNormalsUp);
3815 }
3816 
3817 void adjustNormalsToViewPoints(
3818  const std::map<int, Transform> & poses,
3819  const pcl::PointCloud<pcl::PointXYZ>::Ptr & rawCloud,
3820  const std::vector<int> & rawCameraIndices,
3821  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloud,
3822  float groundNormalsUp)
3823 {
3824  adjustNormalsToViewPointsImpl<pcl::PointXYZRGBNormal>(poses, rawCloud, rawCameraIndices, cloud, groundNormalsUp);
3825 }
3826 
3827 void adjustNormalsToViewPoints(
3828  const std::map<int, Transform> & poses,
3829  const pcl::PointCloud<pcl::PointXYZ>::Ptr & rawCloud,
3830  const std::vector<int> & rawCameraIndices,
3831  pcl::PointCloud<pcl::PointXYZINormal>::Ptr & cloud,
3832  float groundNormalsUp)
3833 {
3834  adjustNormalsToViewPointsImpl<pcl::PointXYZINormal>(poses, rawCloud, rawCameraIndices, cloud, groundNormalsUp);
3835 }
3836 
3837 void adjustNormalsToViewPoints(
3838  const std::map<int, Transform> & viewpoints,
3839  const LaserScan & rawScan,
3840  const std::vector<int> & viewpointIds,
3841  LaserScan & scan,
3842  float groundNormalsUp)
3843 {
3844  UDEBUG("viewpoints=%d, rawScan=%d, viewpointIds=%d, scan=%d", (int)viewpoints.size(), (int)rawScan.size(), (int)viewpointIds.size(), (int)scan.size());
3845  if(viewpoints.size() && rawScan.size() && rawScan.size() == (int)viewpointIds.size() && scan.size() && scan.hasNormals())
3846  {
3847  pcl::PointCloud<pcl::PointXYZ>::Ptr rawCloud = util3d::laserScanToPointCloud(rawScan);
3848  pcl::search::KdTree<pcl::PointXYZ>::Ptr rawTree (new pcl::search::KdTree<pcl::PointXYZ>);
3849  rawTree->setInputCloud (rawCloud);
3850  #pragma omp parallel for
3851  for(int i=0; i<scan.size(); ++i)
3852  {
3853  pcl::PointNormal point = util3d::laserScanToPointNormal(scan, i);
3854  pcl::PointXYZ normal(point.normal_x, point.normal_y, point.normal_z);
3855  if(pcl::isFinite(normal))
3856  {
3857  std::vector<int> indices;
3858  std::vector<float> dist;
3859  rawTree->nearestKSearch(pcl::PointXYZ(point.x, point.y, point.z), 1, indices, dist);
3860  if(indices.size() && indices[0]>=0)
3861  {
3862  UASSERT_MSG(indices[0]<(int)viewpointIds.size(), uFormat("indices[0]=%d viewpointIds.size()=%d", indices[0], (int)viewpointIds.size()).c_str());
3863  UASSERT(uContains(viewpoints, viewpointIds[indices[0]]));
3864  Transform p = viewpoints.at(viewpointIds[indices[0]]);
3865  pcl::PointXYZ viewpoint(p.x(), p.y(), p.z());
3866  Eigen::Vector3f v = viewpoint.getVector3fMap() - point.getVector3fMap();
3867 
3868  Eigen::Vector3f n(normal.x, normal.y, normal.z);
3869 
3870  float result = v.dot(n);
3871  if(result < 0 ||
3872  (groundNormalsUp>0.0f && normal.z < -groundNormalsUp && point.z < viewpoint.z)) // some far velodyne rays on road can have normals toward ground
3873  {
3874  //reverse normal
3875  scan.field(i, scan.getNormalsOffset()) *= -1.0f;
3876  scan.field(i, scan.getNormalsOffset()+1) *= -1.0f;
3877  scan.field(i, scan.getNormalsOffset()+2) *= -1.0f;
3878  }
3879  }
3880  else
3881  {
3882  UWARN("No camera viewpoint found for point %d!?", i);
3883  }
3884  }
3885  }
3886  }
3887 }
3888 
3889 pcl::PolygonMesh::Ptr meshDecimation(const pcl::PolygonMesh::Ptr & mesh, float factor)
3890 {
3891  pcl::PolygonMesh::Ptr output(new pcl::PolygonMesh);
3892 #ifndef DISABLE_VTK
3893  pcl::MeshQuadricDecimationVTK mqd;
3894  mqd.setTargetReductionFactor(factor);
3895  mqd.setInputMesh(mesh);
3896  mqd.process (*output);
3897 #else
3898  UWARN("RTAB-Map is not built with VTK module so mesh decimation cannot be used!");
3899  *output = *mesh;
3900 #endif
3901  return output;
3902 }
3903 
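// Illustrative usage sketch: reducing the polygon count of a reconstructed mesh
// before texturing. It assumes RTAB-Map was built with VTK (otherwise the mesh
// is returned unchanged, as warned above); the helper name and factor are
// examples only.
static pcl::PolygonMesh::Ptr exampleDecimateMesh(const pcl::PolygonMesh::Ptr & mesh)
{
    // A factor of 0.5 asks VTK's quadric decimation to drop about half of the
    // polygons.
    return util3d::meshDecimation(mesh, 0.5f);
}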
3904 bool intersectRayTriangle(
3905  const Eigen::Vector3f & p,
3906  const Eigen::Vector3f & dir,
3907  const Eigen::Vector3f & v0,
3908  const Eigen::Vector3f & v1,
3909  const Eigen::Vector3f & v2,
3910  float & distance,
3911  Eigen::Vector3f & normal)
3912 {
3913  // get triangle edge cv::Vec3fs and plane normal
3914  const Eigen::Vector3f u = v1-v0;
3915  const Eigen::Vector3f v = v2-v0;
3916  normal = u.cross(v); // cross product
3917  if (normal == Eigen::Vector3f(0,0,0)) // triangle is degenerate
3918  return false; // do not deal with this case
3919 
3920  const float denominator = normal.dot(dir);
3921  if (fabs(denominator) < 10e-9) // ray is parallel to triangle plane
3922  return false;
3923 
3924  // get intersect of ray with triangle plane
3925  distance = normal.dot(v0 - p) / denominator;
3926  if (distance < 0.0) // ray goes away from triangle
3927  return false;
3928 
3929  // is I inside T?
3930  float uu, uv, vv, wu, wv, D;
3931  uu = u.dot(u);
3932  uv = u.dot(v);
3933  vv = v.dot(v);
3934  const Eigen::Vector3f w = p + dir * distance - v0;
3935  wu = w.dot(u);
3936  wv = w.dot(v);
3937  D = uv * uv - uu * vv;
3938 
3939  // get and test parametric coords
3940  float s, t;
3941  s = (uv * wv - vv * wu) / D;
3942  if (s < 0.0 || s > 1.0) // I is outside T
3943  return false;
3944  t = (uv * wu - uu * wv) / D;
3945  if (t < 0.0 || (s + t) > 1.0) // I is outside T
3946  return false;
3947 
3948  return true; // I is in T
3949 }
3950 
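// Illustrative usage sketch: a ray cast from the origin along +z against the
// triangle (-1,-1,1), (1,-1,1), (0,1,1) lying in the z=1 plane. It should
// return true with distance == 1; `normal` receives the unnormalized triangle
// normal. The helper name is an example.
static bool exampleRayHitsTriangle()
{
    const Eigen::Vector3f p(0.0f, 0.0f, 0.0f);
    const Eigen::Vector3f dir(0.0f, 0.0f, 1.0f);
    const Eigen::Vector3f v0(-1.0f, -1.0f, 1.0f);
    const Eigen::Vector3f v1(1.0f, -1.0f, 1.0f);
    const Eigen::Vector3f v2(0.0f, 1.0f, 1.0f);
    float distance = 0.0f;
    Eigen::Vector3f normal;
    return util3d::intersectRayTriangle(p, dir, v0, v1, v2, distance, normal);
}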
3955 int saveOBJFile(
3956  const std::string &file_name,
3957  const pcl::TextureMesh &tex_mesh,
3958  unsigned precision)
3959 {
3960  if (tex_mesh.cloud.data.empty ())
3961  {
3962  UERROR ("Input point cloud has no data!\n");
3963  return (-1);
3964  }
3965  // Open file
3966  std::ofstream fs;
3967  fs.precision (precision);
3968  fs.open (file_name.c_str ());
3969 
3970  // Define material file
3971  std::string mtl_file_name = file_name.substr (0, file_name.find_last_of ('.')) + ".mtl";
3972  // Strip path for "mtllib" command
3973  std::string mtl_file_name_nopath = mtl_file_name;
3974  mtl_file_name_nopath.erase (0, mtl_file_name.find_last_of ('/') + 1);
3975 
3976  /* Write 3D information */
3977  // number of points
3978  unsigned nr_points = tex_mesh.cloud.width * tex_mesh.cloud.height;
3979  unsigned point_size = static_cast<unsigned> (tex_mesh.cloud.data.size () / nr_points);
3980 
3981  // mesh size
3982  unsigned nr_meshes = static_cast<unsigned> (tex_mesh.tex_polygons.size ());
3983  // number of faces for header
3984  unsigned nr_faces = 0;
3985  for (unsigned m = 0; m < nr_meshes; ++m)
3986  nr_faces += static_cast<unsigned> (tex_mesh.tex_polygons[m].size ());
3987 
3988  // Write the header information
3989  fs << "####" << '\n';
3990  fs << "# OBJ dataFile simple version. File name: " << file_name << '\n';
3991  fs << "# Vertices: " << nr_points << '\n';
3992  fs << "# Faces: " <<nr_faces << '\n';
3993  fs << "# Material information:" << '\n';
3994  fs << "mtllib " << mtl_file_name_nopath << '\n';
3995  fs << "####" << '\n';
3996 
3997  // Write vertex coordinates
3998  fs << "# Vertices" << '\n';
3999  for (unsigned i = 0; i < nr_points; ++i)
4000  {
4001  int xyz = 0;
4002  // "v" should be written only once per point
4003  bool v_written = false;
4004  for (std::size_t d = 0; d < tex_mesh.cloud.fields.size (); ++d)
4005  {
4006  // adding vertex
4007  if ((tex_mesh.cloud.fields[d].datatype == pcl::PCLPointField::FLOAT32) && (
4008  tex_mesh.cloud.fields[d].name == "x" ||
4009  tex_mesh.cloud.fields[d].name == "y" ||
4010  tex_mesh.cloud.fields[d].name == "z"))
4011  {
4012  if (!v_written)
4013  {
4014  // write vertices beginning with v
4015  fs << "v ";
4016  v_written = true;
4017  }
4018  float value;
4019  memcpy (&value, &tex_mesh.cloud.data[i * point_size + tex_mesh.cloud.fields[d].offset], sizeof (float));
4020  fs << value;
4021  if (++xyz == 3)
4022  continue;
4023  fs << " ";
4024  }
4025  else if(tex_mesh.cloud.fields[d].datatype == pcl::PCLPointField::FLOAT32 &&
4026  tex_mesh.cloud.fields[d].name == "rgb")
4027  {
4028  std::uint32_t rgb = *reinterpret_cast<const int*>(&tex_mesh.cloud.data[i * point_size + tex_mesh.cloud.fields[d].offset]);
4029  std::uint8_t r = (rgb >> 16) & 0x0000ff;
4030  std::uint8_t g = (rgb >> 8) & 0x0000ff;
4031  std::uint8_t b = (rgb) & 0x0000ff;
4032  fs << " " << float(r)/255.0f << " " << float(g)/255.0f << " " << float(b)/255.0f;
4033  break;
4034  }
4035  }
4036  if (xyz != 3)
4037  {
4038  UERROR ("Input point cloud has no XYZ data!\n");
4039  return (-2);
4040  }
4041  fs << '\n';
4042  }
4043  fs << "# "<< nr_points <<" vertices" << '\n';
4044 
4045  // Write vertex normals
4046  for (unsigned i = 0; i < nr_points; ++i)
4047  {
4048  int xyz = 0;
4049  // "vn" should be written only once per point
4050  bool v_written = false;
4051  for (std::size_t d = 0; d < tex_mesh.cloud.fields.size (); ++d)
4052  {
4053  // adding vertex
4054  if ((tex_mesh.cloud.fields[d].datatype == pcl::PCLPointField::FLOAT32) && (
4055  tex_mesh.cloud.fields[d].name == "normal_x" ||
4056  tex_mesh.cloud.fields[d].name == "normal_y" ||
4057  tex_mesh.cloud.fields[d].name == "normal_z"))
4058  {
4059  if (!v_written)
4060  {
4061  // write vertices beginning with vn
4062  fs << "vn ";
4063  v_written = true;
4064  }
4065  float value;
4066  memcpy (&value, &tex_mesh.cloud.data[i * point_size + tex_mesh.cloud.fields[d].offset], sizeof (float));
4067  fs << value;
4068  if (++xyz == 3)
4069  break;
4070  fs << " ";
4071  }
4072  }
4073  if (xyz != 3)
4074  {
4075  UERROR ("Input point cloud has no normals!\n");
4076  return (-2);
4077  }
4078  fs << '\n';
4079  }
4080  // Write vertex texture coordinates with "vt" (added later)
4081 
4082  for (unsigned m = 0; m < nr_meshes; ++m)
4083  {
4084  fs << "# " << tex_mesh.tex_coordinates[m].size() << " vertex textures in submesh " << m << '\n';
4085  for (const auto &coordinate : tex_mesh.tex_coordinates[m])
4086  {
4087  fs << "vt ";
4088  fs << coordinate[0] << " " << coordinate[1] << '\n';
4089  }
4090  }
4091 
4092  unsigned f_idx = 0;
4093 
4094  // int idx_vt =0;
4095  for (unsigned m = 0; m < nr_meshes; ++m)
4096  {
4097  if (m > 0) f_idx += static_cast<unsigned> (tex_mesh.tex_polygons[m-1].size ());
4098 
4099  fs << "# The material will be used for mesh " << m << '\n';
4100  fs << "usemtl " << tex_mesh.tex_materials[m].tex_name << '\n';
4101  fs << "# Faces" << '\n';
4102 
4103  for (std::size_t i = 0; i < tex_mesh.tex_polygons[m].size(); ++i)
4104  {
4105  // Write faces with "f"
4106  fs << "f";
4107  // There's one UV per vertex per face, i.e., the same vertex can have
4108  // different UV depending on the face.
4109  for (std::size_t j = 0; j < tex_mesh.tex_polygons[m][i].vertices.size (); ++j)
4110  {
4111  std::uint32_t idx = tex_mesh.tex_polygons[m][i].vertices[j] + 1;
4112  fs << " " << idx
4113  << "/" << tex_mesh.tex_polygons[m][i].vertices.size () * (i+f_idx) +j+1
4114  << "/" << idx; // vertex index in obj file format starting with 1
4115  }
4116  fs << '\n';
4117  }
4118  fs << "# "<< tex_mesh.tex_polygons[m].size() << " faces in mesh " << m << '\n';
4119  }
4120  fs << "# End of File" << std::flush;
4121 
4122  // Close obj file
4123  fs.close ();
4124 
4125  /* Write material definition for OBJ file*/
4126  // Open file
4127 
4128  std::ofstream m_fs;
4129  m_fs.precision (precision);
4130  m_fs.open (mtl_file_name.c_str ());
4131 
4132  // default
4133  m_fs << "#" << '\n';
4134  m_fs << "# Wavefront material file" << '\n';
4135  m_fs << "#" << '\n';
4136  for(unsigned m = 0; m < nr_meshes; ++m)
4137  {
4138  m_fs << "newmtl " << tex_mesh.tex_materials[m].tex_name << '\n';
4139  m_fs << "Ka "<< tex_mesh.tex_materials[m].tex_Ka.r << " " << tex_mesh.tex_materials[m].tex_Ka.g << " " << tex_mesh.tex_materials[m].tex_Ka.b << '\n'; // defines the ambient color of the material to be (r,g,b).
4140  m_fs << "Kd "<< tex_mesh.tex_materials[m].tex_Kd.r << " " << tex_mesh.tex_materials[m].tex_Kd.g << " " << tex_mesh.tex_materials[m].tex_Kd.b << '\n'; // defines the diffuse color of the material to be (r,g,b).
4141  m_fs << "Ks "<< tex_mesh.tex_materials[m].tex_Ks.r << " " << tex_mesh.tex_materials[m].tex_Ks.g << " " << tex_mesh.tex_materials[m].tex_Ks.b << '\n'; // defines the specular color of the material to be (r,g,b). This color shows up in highlights.
4142  m_fs << "d " << tex_mesh.tex_materials[m].tex_d << '\n'; // defines the transparency of the material to be alpha.
4143  m_fs << "Ns "<< tex_mesh.tex_materials[m].tex_Ns << '\n'; // defines the shininess of the material to be s.
4144  m_fs << "illum "<< tex_mesh.tex_materials[m].tex_illum << '\n'; // denotes the illumination model used by the material.
4145  // illum = 1 indicates a flat material with no specular highlights, so the value of Ks is not used.
4146  // illum = 2 denotes the presence of specular highlights, and so a specification for Ks is required.
4147  m_fs << "map_Kd " << tex_mesh.tex_materials[m].tex_file << '\n';
4148  m_fs << "###" << '\n';
4149  }
4150  m_fs.close ();
4151  return (0);
4152 }
4153 
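// Illustrative usage sketch for the TextureMesh export above. It assumes
// `textureMesh` was produced by util3d::createTextureMesh() and that its
// tex_materials[].tex_file entries reference texture images stored next to the
// output file; the helper name and output path are examples only.
static bool exampleSaveTexturedObj(const pcl::TextureMesh & textureMesh)
{
    // Writes "mesh.obj" and the companion "mesh.mtl" with 5-digit precision;
    // returns 0 on success.
    return util3d::saveOBJFile("mesh.obj", textureMesh, 5) == 0;
}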
4158 int saveOBJFile(
4159  const std::string &file_name,
4160  const pcl::PolygonMesh &mesh,
4161  unsigned precision)
4162 {
4163  if (mesh.cloud.data.empty ())
4164  {
4165  UERROR ("Input point cloud has no data!\n");
4166  return (-1);
4167  }
4168  // Open file
4169  std::ofstream fs;
4170  fs.precision (precision);
4171  fs.open (file_name.c_str ());
4172 
4173  /* Write 3D information */
4174  // number of points
4175  int nr_points = mesh.cloud.width * mesh.cloud.height;
4176  // point size
4177  unsigned point_size = static_cast<unsigned> (mesh.cloud.data.size () / nr_points);
4178  // number of faces for header
4179  unsigned nr_faces = static_cast<unsigned> (mesh.polygons.size ());
4180  // Do we have vertices normals?
4181  int normal_index = getFieldIndex (mesh.cloud, "normal_x");
4182 
4183  // Write the header information
4184  fs << "####" << '\n';
4185  fs << "# OBJ dataFile simple version. File name: " << file_name << '\n';
4186  fs << "# Vertices: " << nr_points << '\n';
4187  if (normal_index != -1)
4188  fs << "# Vertices normals : " << nr_points << '\n';
4189  fs << "# Faces: " <<nr_faces << '\n';
4190  fs << "####" << '\n';
4191 
4192  // Write vertex coordinates
4193  fs << "# List of Vertices, with (x,y,z) coordinates, w is optional." << '\n';
4194  for (int i = 0; i < nr_points; ++i)
4195  {
4196  int xyz = 0;
4197  for (std::size_t d = 0; d < mesh.cloud.fields.size (); ++d)
4198  {
4199  // adding vertex
4200  if ((mesh.cloud.fields[d].datatype == pcl::PCLPointField::FLOAT32) && (
4201  mesh.cloud.fields[d].name == "x" ||
4202  mesh.cloud.fields[d].name == "y" ||
4203  mesh.cloud.fields[d].name == "z"))
4204  {
4205  if (mesh.cloud.fields[d].name == "x")
4206  // write vertices beginning with v
4207  fs << "v ";
4208 
4209  float value;
4210  memcpy (&value, &mesh.cloud.data[i * point_size + mesh.cloud.fields[d].offset], sizeof (float));
4211  fs << value;
4212  if (++xyz == 3)
4213  continue;
4214  fs << " ";
4215  }
4216  else if(mesh.cloud.fields[d].datatype == pcl::PCLPointField::FLOAT32 &&
4217  mesh.cloud.fields[d].name == "rgb")
4218  {
4219  std::uint32_t rgb = *reinterpret_cast<const int*>(&mesh.cloud.data[i * point_size + mesh.cloud.fields[d].offset]);
4220  std::uint8_t r = (rgb >> 16) & 0x0000ff;
4221  std::uint8_t g = (rgb >> 8) & 0x0000ff;
4222  std::uint8_t b = (rgb) & 0x0000ff;
4223  fs << " " << float(r)/255.0f << " " << float(g)/255.0f << " " << float(b)/255.0f;
4224  break;
4225  }
4226  }
4227  if (xyz != 3)
4228  {
4229  UERROR ("Input point cloud has no XYZ data!\n");
4230  return (-2);
4231  }
4232  fs << '\n';
4233  }
4234 
4235  fs << "# "<< nr_points <<" vertices" << '\n';
4236 
4237  if(normal_index != -1)
4238  {
4239  fs << "# Normals in (x,y,z) form; normals might not be unit." << '\n';
4240  // Write vertex normals
4241  for (int i = 0; i < nr_points; ++i)
4242  {
4243  int nxyz = 0;
4244  for (std::size_t d = 0; d < mesh.cloud.fields.size (); ++d)
4245  {
4246  // adding vertex
4247  if ((mesh.cloud.fields[d].datatype == pcl::PCLPointField::FLOAT32) && (
4248  mesh.cloud.fields[d].name == "normal_x" ||
4249  mesh.cloud.fields[d].name == "normal_y" ||
4250  mesh.cloud.fields[d].name == "normal_z"))
4251  {
4252  if (mesh.cloud.fields[d].name == "normal_x")
4253  // write vertices beginning with vn
4254  fs << "vn ";
4255 
4256  float value;
4257  memcpy (&value, &mesh.cloud.data[i * point_size + mesh.cloud.fields[d].offset], sizeof (float));
4258  fs << value;
4259  if (++nxyz == 3)
4260  break;
4261  fs << " ";
4262  }
4263  }
4264  if (nxyz != 3)
4265  {
4266  UERROR ("Input point cloud has no normals!\n");
4267  return (-2);
4268  }
4269  fs << '\n';
4270  }
4271 
4272  fs << "# "<< nr_points <<" vertices normals" << '\n';
4273  }
4274 
4275  fs << "# Face Definitions" << '\n';
4276  // Write down faces
4277  if(normal_index == -1)
4278  {
4279  for(unsigned i = 0; i < nr_faces; i++)
4280  {
4281  fs << "f ";
4282  for (std::size_t j = 0; j < mesh.polygons[i].vertices.size () - 1; ++j)
4283  fs << mesh.polygons[i].vertices[j] + 1 << " ";
4284  fs << mesh.polygons[i].vertices.back() + 1 << '\n';
4285  }
4286  }
4287  else
4288  {
4289  for(unsigned i = 0; i < nr_faces; i++)
4290  {
4291  fs << "f ";
4292  for (std::size_t j = 0; j < mesh.polygons[i].vertices.size () - 1; ++j)
4293  fs << mesh.polygons[i].vertices[j] + 1 << "//" << mesh.polygons[i].vertices[j] + 1 << " ";
4294  fs << mesh.polygons[i].vertices.back() + 1 << "//" << mesh.polygons[i].vertices.back() + 1 << '\n';
4295  }
4296  }
4297  fs << "# End of File" << std::endl;
4298 
4299  // Close obj file
4300  fs.close ();
4301  return 0;
4302 }
4303 
4304 }
4305 
4306 }
rtabmap::SensorData
Definition: SensorData.h:51
w
RowVector3d w
int
int
rtabmap::LaserScan::localTransform
Transform localTransform() const
Definition: LaserScan.h:127
rtabmap::util3d::sqr
double sqr(uchar v)
Definition: util3d_surface.cpp:1433
reference
reference
Compression.h
rtabmap::DBDriver::getNodeData
void getNodeData(int signatureId, SensorData &data, bool images=true, bool scan=true, bool userData=true, bool occupancyGrid=true) const
Definition: DBDriver.cpp:686
rtabmap::DBDriver::getCalibration
bool getCalibration(int signatureId, std::vector< CameraModel > &models, std::vector< StereoCameraModel > &stereoModels) const
Definition: DBDriver.cpp:737
UFile::getName
std::string getName()
Definition: UFile.h:135
rtabmap::util3d::computeNormals2DImpl
pcl::PointCloud< pcl::Normal >::Ptr computeNormals2DImpl(const typename pcl::PointCloud< PointT >::Ptr &cloud, int searchK, float searchRadius, const Eigen::Vector3f &viewPoint)
Definition: util3d_surface.cpp:2917
rtabmap::uncompressImage
cv::Mat RTABMAP_CORE_EXPORT uncompressImage(const cv::Mat &bytes)
Definition: Compression.cpp:144
UINFO
#define UINFO(...)
rtabmap::util3d::createPolygonIndexes
void RTABMAP_CORE_EXPORT createPolygonIndexes(const std::vector< pcl::Vertices > &polygons, int cloudSize, std::vector< std::set< int > > &neighborPolygons, std::vector< std::set< int > > &vertexPolygons)
Given a set of polygons, create two indexes: polygons to neighbor polygons and vertices to polygons.
Definition: util3d_surface.cpp:94
uMean
T uMean(const T *v, unsigned int size)
Definition: UMath.h:399
UFile::erase
static int erase(const std::string &filePath)
Definition: UFile.cpp:58
texture_mapping.h
D
MatrixXcd D
util3d_surface.h
rtabmap::util3d::adjustNormalsToViewPointsImpl
void adjustNormalsToViewPointsImpl(const std::map< int, Transform > &poses, const pcl::PointCloud< pcl::PointXYZ >::Ptr &rawCloud, const std::vector< int > &rawCameraIndices, typename pcl::PointCloud< PointT >::Ptr &cloud, float groundNormalsUp)
Definition: util3d_surface.cpp:3758
rtabmap::util3d::filterInvalidPolygons
std::vector< pcl::Vertices > RTABMAP_CORE_EXPORT filterInvalidPolygons(const std::vector< pcl::Vertices > &polygons)
Definition: util3d_surface.cpp:531
v1
v1
rtabmap::LaserScan::maxPoints
int maxPoints() const
Definition: LaserScan.h:120
UDirectory::getFileNames
const std::list< std::string > & getFileNames() const
Definition: UDirectory.h:129
UDirectory::makeDir
static bool makeDir(const std::string &dirPath)
Definition: UDirectory.cpp:333
cameras
std::array< PinholeCamera< Cal3_S2 >, 3 > cameras
pcl::TextureMapping::setMaxDepthError
void setMaxDepthError(float maxDepthError)
Definition: texture_mapping.h:181
alpha
RealScalar alpha
rtabmap::LaserScan::rangeMax
float rangeMax() const
Definition: LaserScan.h:122
timer
s
RealScalar s
rtabmap::LaserScan::hasRGB
bool hasRGB() const
Definition: LaserScan.h:135
rtabmap::util3d::createTextureCameras
pcl::texture_mapping::CameraVector createTextureCameras(const std::map< int, Transform > &poses, const std::map< int, std::vector< CameraModel > > &cameraModels, const std::map< int, cv::Mat > &cameraDepths, const std::vector< float > &roiRatios)
Definition: util3d_surface.cpp:598
b
Array< int, 3, 1 > b
rtabmap::util3d::cleanTextureMesh
void RTABMAP_CORE_EXPORT cleanTextureMesh(pcl::TextureMesh &textureMesh, int minClusterSize)
Definition: util3d_surface.cpp:873
rtflann::uchar
unsigned char uchar
Definition: matrix.h:69
uSum
T uSum(const std::list< T > &list)
Definition: UMath.h:318
rtabmap::util3d::removeNaNNormalsFromPointCloud
pcl::PointCloud< pcl::PointNormal >::Ptr RTABMAP_CORE_EXPORT removeNaNNormalsFromPointCloud(const pcl::PointCloud< pcl::PointNormal >::Ptr &cloud)
Definition: util3d_filtering.cpp:1218
rtabmap::Memory::getNodeInfo
bool getNodeInfo(int signatureId, Transform &odomPose, int &mapId, int &weight, std::string &label, double &stamp, Transform &groundTruth, std::vector< float > &velocity, GPS &gps, EnvSensors &sensors, bool lookInDatabase=false) const
Definition: Memory.cpp:4093
tree
UDirectory.h
size
Index size
rtabmap::CameraModel
Definition: CameraModel.h:38
pcl::TextureMapping::setMaxDistance
void setMaxDistance(float maxDistance)
Definition: texture_mapping.h:175
rtabmap::util3d::createTextureMesh
pcl::TextureMesh::Ptr RTABMAP_CORE_EXPORT createTextureMesh(const pcl::PolygonMesh::Ptr &mesh, const std::map< int, Transform > &poses, const std::map< int, CameraModel > &cameraModels, const std::map< int, cv::Mat > &cameraDepths, float maxDistance=0.0f, float maxDepthError=0.0f, float maxAngle=0.0f, int minClusterSize=50, const std::vector< float > &roiRatios=std::vector< float >(), const ProgressState *state=0, std::vector< std::map< int, pcl::PointXY > > *vertexToPixels=0, bool distanceToCamPolicy=false)
Definition: util3d_surface.cpp:675
rtabmap::util3d::appendMesh
void RTABMAP_CORE_EXPORT appendMesh(pcl::PointCloud< pcl::PointXYZRGBNormal > &cloudA, std::vector< pcl::Vertices > &polygonsA, const pcl::PointCloud< pcl::PointXYZRGBNormal > &cloudB, const std::vector< pcl::Vertices > &polygonsB)
Definition: util3d_surface.cpp:283
rtabmap::LaserScan::data
const cv::Mat & data() const
Definition: LaserScan.h:116
organized_fast_mesh.h
count
Index count
rtabmap::GPS
Definition: GPS.h:35
rtabmap::LaserScan::getNormalsOffset
int getNormalsOffset() const
Definition: LaserScan.h:145
ULogger::kDebug
@ kDebug
Definition: ULogger.h:252
col
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE ColXpr col(Index i) const
rtabmap_netvlad.img
img
Definition: rtabmap_netvlad.py:78
state
RecoveryProgressState state
Definition: tools/Recovery/main.cpp:56
pcl::OrganizedFastMesh::setViewpoint
void setViewpoint(const Eigen::Vector3f &viewpoint)
Set the viewpoint from where the input point cloud has been acquired.
Definition: organized_fast_mesh.h:178
rtabmap::util3d::assembleTextureMesh
pcl::TextureMesh::Ptr RTABMAP_CORE_EXPORT assembleTextureMesh(const cv::Mat &cloudMat, const std::vector< std::vector< std::vector< RTABMAP_PCL_INDEX > > > &polygons, const std::vector< std::vector< Eigen::Vector2f > > &texCoords, cv::Mat &textures, bool mergeTextures=false)
Definition: util3d_surface.cpp:1272
rtabmap::util3d::clusterPolygons
std::list< std::list< int > > RTABMAP_CORE_EXPORT clusterPolygons(const std::vector< std::set< int > > &neighborPolygons, int minClusterSize=0)
Definition: util3d_surface.cpp:131
rtabmap::util3d::adjustNormalsToViewPoint
RTABMAP_DEPRECATED LaserScan RTABMAP_CORE_EXPORT adjustNormalsToViewPoint(const LaserScan &scan, const Eigen::Vector3f &viewpoint, bool forceGroundNormalsUp)
Definition: util3d_surface.cpp:3569
y
Matrix3f y
uIsInteger
bool uIsInteger(const std::string &str, bool checkForSign=true)
Definition: UStl.h:630
rtabmap::util2d::brightnessAndContrastAuto
cv::Mat RTABMAP_CORE_EXPORT brightnessAndContrastAuto(const cv::Mat &src, const cv::Mat &mask, float clipLowHistPercent=0, float clipHighHistPercent=0, float *alphaOut=0, float *betaOut=0)
Automatic brightness and contrast optimization with optional histogram clipping.
Definition: util2d.cpp:1942
rtabmap::util3d::convertPolygonsFromPCL
std::vector< std::vector< RTABMAP_PCL_INDEX > > RTABMAP_CORE_EXPORT convertPolygonsFromPCL(const std::vector< pcl::Vertices > &polygons)
Definition: util3d_surface.cpp:1227
iterator
rtabmap::util3d::computeFastOrganizedNormals2DImpl
pcl::PointCloud< pcl::Normal >::Ptr computeFastOrganizedNormals2DImpl(const typename pcl::PointCloud< PointT >::Ptr &cloud, int searchK, float searchRadius, const Eigen::Vector3f &viewPoint)
Definition: util3d_surface.cpp:3008
rtabmap::LaserScan::hasNormals
bool hasNormals() const
Definition: LaserScan.h:134
uVariance
T uVariance(const T *v, unsigned int size, T meanV)
Definition: UMath.h:489
rtabmap::util3d::concatenate
pcl::IndicesPtr RTABMAP_CORE_EXPORT concatenate(const std::vector< pcl::IndicesPtr > &indices)
Concatenate a vector of indices to a single vector.
Definition: util3d.cpp:3481
h
const double h
last
static const symbolic::SymbolExpr< internal::symbolic_last_tag > last
util3d.h
rtabmap::LaserScan
Definition: LaserScan.h:37
beta
double beta(double a, double b)
rtabmap::util3d::laserScanFromPointCloud
LaserScan laserScanFromPointCloud(const PointCloud2T &cloud, bool filterNaNs, bool is2D, const Transform &transform)
Definition: util3d.hpp:37
rows
int rows
UTimer.h
rtabmap::maxDepthError
const float maxDepthError
Definition: CameraTango.cpp:43
rtabmap::LaserScan::size
int size() const
Definition: LaserScan.h:131
cam
rtabmap::SensorCaptureThread * cam
Definition: tools/DataRecorder/main.cpp:58
rtabmap::LaserScan::isEmpty
bool isEmpty() const
Definition: LaserScan.h:130
UMath.h
Basic mathematics functions.
indices
indices
rtabmap::util3d::organizedFastMesh
std::vector< pcl::Vertices > RTABMAP_CORE_EXPORT organizedFastMesh(const pcl::PointCloud< pcl::PointXYZ >::Ptr &cloud, double angleTolerance, bool quad, int trianglePixelSize, const Eigen::Vector3f &viewpoint=Eigen::Vector3f(0, 0, 0))
Definition: util3d_surface.cpp:168
factor
DecisionTreeFactor factor(D &C &B &A, "0.0 0.0 0.0 0.60658897 0.61241912 0.61241969 0.61247685 0.61247742 0.0 " "0.0 0.0 0.99995287 1.0 1.0 1.0 1.0")
rtabmap::util3d::computeNormalsImpl
pcl::PointCloud< pcl::Normal >::Ptr computeNormalsImpl(const typename pcl::PointCloud< PointT >::Ptr &cloud, const pcl::IndicesPtr &indices, int searchK, float searchRadius, const Eigen::Vector3f &viewPoint)
Definition: util3d_surface.cpp:2823
fabs
Real fabs(const Real &a)
pcl::TextureMapping
The texture mapping algorithm.
Definition: texture_mapping.h:105
n
int n
glm::intersectRayTriangle
GLM_FUNC_DECL bool intersectRayTriangle(genType const &orig, genType const &dir, genType const &vert0, genType const &vert1, genType const &vert2, genType &baryPosition)
rtabmap::LaserScan::format
Format format() const
Definition: LaserScan.h:117
rtabmap::util3d::adjustNormalsToViewPoints
void RTABMAP_CORE_EXPORT adjustNormalsToViewPoints(const std::map< int, Transform > &poses, const std::vector< int > &cameraIndices, pcl::PointCloud< pcl::PointNormal >::Ptr &cloud, float groundNormalsUp=0.0f)
Definition: util3d_surface.cpp:3730
pcl::OrganizedFastMesh::setAngleTolerance
void setAngleTolerance(float angle_tolerance)
Set the angle tolerance used for checking whether or not an edge is occluded. Standard values are 5de...
Definition: organized_fast_mesh.h:204
rtabmap::util3d::transformPoint
cv::Point3f RTABMAP_CORE_EXPORT transformPoint(const cv::Point3f &pt, const Transform &transform)
Definition: util3d_transforms.cpp:211
rtabmap::util3d::computeNormalsComplexity
float RTABMAP_CORE_EXPORT computeNormalsComplexity(const LaserScan &scan, const Transform &t=Transform::getIdentity(), cv::Mat *pcaEigenVectors=0, cv::Mat *pcaEigenValues=0)
Definition: util3d_surface.cpp:3167
point
point
uContains
bool uContains(const std::list< V > &list, const V &value)
Definition: UStl.h:407
rtabmap::LaserScan::field
float & field(unsigned int pointIndex, unsigned int channelOffset)
Definition: LaserScan.cpp:446
pcl::OrganizedFastMesh
Simple triangulation/surface reconstruction for organized point clouds. Neighboring points (pixels in...
Definition: organized_fast_mesh.h:65
glm::uint32_t
detail::uint32 uint32_t
Definition: fwd.hpp:916
A
data
int data[]
Eigen::aligned_allocator
UDirectory::removeDir
static bool removeDir(const std::string &dirPath)
Definition: UDirectory.cpp:344
util3d_transforms.h
coordinate
string coordinate
gtsam::Values::end
deref_iterator end() const
UDirectory::getDir
static std::string getDir(const std::string &filePath)
Definition: UDirectory.cpp:273
scale
set noclip points set clip one set noclip two set bar set border lt lw set xdata set ydata set zdata set x2data set y2data set boxwidth set dummy y set format x g set format y g set format x2 g set format y2 g set format z g set angles radians set nogrid set key title set key left top Right noreverse box linetype linewidth samplen spacing width set nolabel set noarrow set nologscale set logscale x set set pointsize set encoding default set nopolar set noparametric set set set set surface set nocontour set clabel set mapping cartesian set nohidden3d set cntrparam order set cntrparam linear set cntrparam levels auto set cntrparam points set size set set xzeroaxis lt lw set x2zeroaxis lt lw set yzeroaxis lt lw set y2zeroaxis lt lw set tics in set ticslevel set tics scale
g
float g
j
std::ptrdiff_t j
uNumber2Str
std::string UTILITE_EXPORT uNumber2Str(unsigned int number)
Definition: UConversion.cpp:91
vertices
static const float vertices[]
Definition: quad_color.cpp:20
rtabmap::util3d::computeFastOrganizedNormals
pcl::PointCloud< pcl::Normal >::Ptr RTABMAP_CORE_EXPORT computeFastOrganizedNormals(const pcl::PointCloud< pcl::PointXYZRGB >::Ptr &cloud, float maxDepthChangeFactor=0.02f, float normalSmoothingSize=10.0f, const Eigen::Vector3f &viewPoint=Eigen::Vector3f(0, 0, 0))
Definition: util3d_surface.cpp:3119
UConversion.h
Some conversion functions.
glm::sqrt
GLM_FUNC_DECL vecType< T, P > sqrt(vecType< T, P > const &x)
UDirectory::getNextFilePath
std::string getNextFilePath()
Definition: UDirectory.cpp:232
util3d_filtering.h
glm::pi
GLM_FUNC_DECL genType pi()
rtabmap::util3d::laserScanToPointCloudRGB
pcl::PointCloud< pcl::PointXYZRGB >::Ptr RTABMAP_CORE_EXPORT laserScanToPointCloudRGB(const LaserScan &laserScan, const Transform &transform=Transform(), unsigned char r=100, unsigned char g=100, unsigned char b=100)
Definition: util3d.cpp:2342
rtabmap::LaserScan::is2d
bool is2d() const
Definition: LaserScan.h:133
all
static const Eigen::internal::all_t all
first
constexpr int first(int i)
ULogger::kWarning
@ kWarning
Definition: ULogger.h:252
rtabmap::util3d::gcd
int gcd(int a, int b)
Definition: util3d_surface.cpp:1075
ULogger::kInfo
@ kInfo
Definition: ULogger.h:252
glm::row
GLM_FUNC_DECL genType::row_type row(genType const &m, length_t const &index)
rtabmap::util3d::laserScan2dFromPointCloud
LaserScan RTABMAP_CORE_EXPORT laserScan2dFromPointCloud(const pcl::PointCloud< pcl::PointXYZ > &cloud, const Transform &transform=Transform(), bool filterNaNs=true)
Definition: util3d.cpp:2012
rtabmap::ProgressState::isCanceled
bool isCanceled() const
Definition: ProgressState.h:51
UASSERT
#define UASSERT(condition)
d
d
rtabmap::ProgressState
Definition: ProgressState.h:35
z
z
RecoveryProgressState::callback
virtual bool callback(const std::string &msg) const
Definition: tools/Recovery/main.cpp:49
x
x
m
Matrix3f m
p
Point3_ p(2)
rtabmap::util3d::concatenateTextureMaterials
void RTABMAP_CORE_EXPORT concatenateTextureMaterials(pcl::TextureMesh &mesh, const cv::Size &imageSize, int textureSize, int maxTextures, float &scale, std::vector< bool > *materialsKept=0)
Definition: util3d_surface.cpp:1079
rtabmap::util3d::filterNotUsedVerticesFromMesh
std::vector< int > RTABMAP_CORE_EXPORT filterNotUsedVerticesFromMesh(const pcl::PointCloud< pcl::PointXYZRGBNormal > &cloud, const std::vector< pcl::Vertices > &polygons, pcl::PointCloud< pcl::PointXYZRGBNormal > &outputCloud, std::vector< pcl::Vertices > &outputPolygons)
Definition: util3d_surface.cpp:335
rtabmap::Memory::getNodeData
SensorData getNodeData(int locationId, bool images, bool scan, bool userData, bool occupancyGrid) const
Definition: Memory.cpp:4143
DBDriver.h
glm::angle
GLM_FUNC_DECL T angle(detail::tquat< T, P > const &x)
rtabmap::util3d::saveOBJFile
int RTABMAP_CORE_EXPORT saveOBJFile(const std::string &file_name, const pcl::TextureMesh &tex_mesh, unsigned precision=5)
Definition: util3d_surface.cpp:3955
rtabmap::util3d::meshDecimation
pcl::PolygonMesh::Ptr RTABMAP_CORE_EXPORT meshDecimation(const pcl::PolygonMesh::Ptr &mesh, float factor)
Definition: util3d_surface.cpp:3889
uStrContains
bool uStrContains(const std::string &string, const std::string &substring)
Definition: UStl.h:785
rtabmap::util3d::laserScanToPointCloudI
pcl::PointCloud< pcl::PointXYZI >::Ptr RTABMAP_CORE_EXPORT laserScanToPointCloudI(const LaserScan &laserScan, const Transform &transform=Transform(), float intensity=0.0f)
Definition: util3d.cpp:2369
normal
Unit3_ normal(const OrientedPlane3_ &p)
anyset::size
size_t size() const
rtabmap::util3d::computeNormals2D
pcl::PointCloud< pcl::Normal >::Ptr RTABMAP_CORE_EXPORT computeNormals2D(const pcl::PointCloud< pcl::PointXYZ >::Ptr &cloud, int searchK=5, float searchRadius=0.0f, const Eigen::Vector3f &viewPoint=Eigen::Vector3f(0, 0, 0))
Definition: util3d_surface.cpp:2990
rtabmap::LaserScan::angleMax
float angleMax() const
Definition: LaserScan.h:124
rtabmap::util3d::laserScanToPointNormal
pcl::PointNormal RTABMAP_CORE_EXPORT laserScanToPointNormal(const LaserScan &laserScan, int index)
Definition: util3d.cpp:2463
rtabmap::util3d::multiBandTexturing
RTABMAP_DEPRECATED bool RTABMAP_CORE_EXPORT multiBandTexturing(const std::string &outputOBJPath, const pcl::PCLPointCloud2 &cloud, const std::vector< pcl::Vertices > &polygons, const std::map< int, Transform > &cameraPoses, const std::vector< std::map< int, pcl::PointXY > > &vertexToPixels, const std::map< int, cv::Mat > &images, const std::map< int, std::vector< CameraModel > > &cameraModels, const Memory *memory=0, const DBDriver *dbDriver=0, int textureSize=8192, const std::string &textureFormat="jpg", const std::map< int, std::map< int, cv::Vec4d > > &gains=std::map< int, std::map< int, cv::Vec4d > >(), const std::map< int, std::map< int, cv::Mat > > &blendingGains=std::map< int, std::map< int, cv::Mat > >(), const std::pair< float, float > &contrastValues=std::pair< float, float >(0, 0), bool gainRGB=true)
Definition: util3d_surface.cpp:2301
pcl::OrganizedFastMesh::setTriangulationType
void setTriangulationType(TriangulationType type)
Set the triangulation type (see TriangulationType)
Definition: organized_fast_mesh.h:170
glm::precision
precision
Definition: precision.hpp:33
rtabmap::util3d::filterNaNPointsFromMesh
std::vector< int > RTABMAP_CORE_EXPORT filterNaNPointsFromMesh(const pcl::PointCloud< pcl::PointXYZRGB > &cloud, const std::vector< pcl::Vertices > &polygons, pcl::PointCloud< pcl::PointXYZRGB > &outputCloud, std::vector< pcl::Vertices > &outputPolygons)
Definition: util3d_surface.cpp:415
UASSERT_MSG
#define UASSERT_MSG(condition, msg_str)
Definition: ULogger.h:67
glm::uint8_t
detail::uint8 uint8_t
Definition: fwd.hpp:908
tango_gl::kIndices
static const GLushort kIndices[]
Definition: video_overlay.cpp:28
UWARN
#define UWARN(...)
pcl::TextureMapping::textureMeshwithMultipleCameras2
bool textureMeshwithMultipleCameras2(pcl::TextureMesh &mesh, const pcl::texture_mapping::CameraVector &cameras, const rtabmap::ProgressState *callback=0, std::vector< std::map< int, pcl::PointXY > > *vertexToPixels=0, bool distanceToCamPolicy=false)
Definition: texture_mapping.hpp:1049
f
Point2(* f)(const Point3 &, OptionalJacobian< 2, 3 >)
rtabmap::DBDriver
Definition: DBDriver.h:62
gtsam::Values::begin
deref_iterator begin() const
uFormat
std::string UTILITE_EXPORT uFormat(const char *fmt,...)
Definition: UConversion.cpp:365
ULogger::level
static ULogger::Level level()
Definition: ULogger.h:340
kdtree
kdtree
cameraPoses
std::array< Pose3, 3 > cameraPoses
rtabmap::util3d::computeNormals
LaserScan computeNormals(const LaserScan &laserScan, int searchK, float searchRadius)
Definition: util3d_surface.cpp:2748
e
Array< double, 1, 3 > e(1./3., 0.5, 2.)
rtabmap::util3d::mls
pcl::PointCloud< pcl::PointXYZRGBNormal >::Ptr RTABMAP_CORE_EXPORT mls(const pcl::PointCloud< pcl::PointXYZRGB >::Ptr &cloud, float searchRadius=0.0f, int polygonialOrder=2, int upsamplingMethod=0, float upsamplingRadius=0.0f, float upsamplingStep=0.0f, int pointDensity=0, float dilationVoxelSize=1.0f, int dilationIterations=0)
Definition: util3d_surface.cpp:3470
rtabmap::util3d::createMesh
pcl::PolygonMesh::Ptr RTABMAP_CORE_EXPORT createMesh(const pcl::PointCloud< pcl::PointXYZRGBNormal >::Ptr &cloudWithNormals, float gp3SearchRadius=0.025, float gp3Mu=2.5, int gp3MaximumNearestNeighbors=100, float gp3MaximumSurfaceAngle=M_PI/4, float gp3MinimumAngle=M_PI/18, float gp3MaximumAngle=2 *M_PI/3, bool gp3NormalConsistency=true)
Definition: util3d_surface.cpp:555
ULogger.h
ULogger class and convenient macros.
a
ArrayXXi a
rtabmap::util3d::assemblePolygonMesh
pcl::PolygonMesh::Ptr RTABMAP_CORE_EXPORT assemblePolygonMesh(const cv::Mat &cloudMat, const std::vector< std::vector< RTABMAP_PCL_INDEX > > &polygons)
Definition: util3d_surface.cpp:1395
rtabmap::Transform
Definition: Transform.h:41
Memory.h
util2d.h
uSplit
std::list< std::string > uSplit(const std::string &str, char separator=' ')
Definition: UStl.h:564
rtabmap::EnvSensors
std::map< EnvSensor::Type, EnvSensor > EnvSensors
Definition: EnvSensor.h:81
uStr2Int
int UTILITE_EXPORT uStr2Int(const std::string &str)
Definition: UConversion.cpp:125
glm::rotation
GLM_FUNC_DECL detail::tquat< T, P > rotation(detail::tvec3< T, P > const &orig, detail::tvec3< T, P > const &dest)
rtabmap::DBDriver::getWeight
void getWeight(int signatureId, int &weight) const
Definition: DBDriver.cpp:860
UDirectory::setPath
void setPath(const std::string &path, const std::string &extensions="")
Definition: UDirectory.cpp:95
values
leaf::MyValues values
N
N
iter
iterator iter(handle obj)
pcl::TextureMapping::setMinClusterSize
void setMinClusterSize(int size)
Definition: texture_mapping.h:193
pcl::texture_mapping::Camera
Structure to store camera pose and focal length.
Definition: texture_mapping.h:68
rtabmap::Transform::isIdentity
bool isIdentity() const
Definition: Transform.cpp:136
rtabmap::util3d::mergeTextures
cv::Mat RTABMAP_CORE_EXPORT mergeTextures(pcl::TextureMesh &mesh, const std::map< int, cv::Mat > &images, const std::map< int, CameraModel > &calibrations, const Memory *memory=0, const DBDriver *dbDriver=0, int textureSize=4096, int textureCount=1, const std::vector< std::map< int, pcl::PointXY > > &vertexToPixels=std::vector< std::map< int, pcl::PointXY > >(), bool gainCompensation=true, float gainBeta=10.0f, bool gainRGB=true, bool blending=true, int blendingDecimation=0, int brightnessContrastRatioLow=0, int brightnessContrastRatioHigh=0, bool exposureFusion=false, const ProgressState *state=0, unsigned char blankValue=255, bool clearVertexColorUnderTexture=true, std::map< int, std::map< int, cv::Vec4d > > *gains=0, std::map< int, std::map< int, cv::Mat > > *blendingGains=0, std::pair< float, float > *contrastValues=0)
Definition: util3d_surface.cpp:1438
c_str
const char * c_str(Args &&...args)
v
Array< int, Dynamic, 1 > v
UDEBUG
#define UDEBUG(...)
rtabmap::util3d::concatenateTextureMeshes
pcl::TextureMesh::Ptr RTABMAP_CORE_EXPORT concatenateTextureMeshes(const std::list< pcl::TextureMesh::Ptr > &meshes)
Definition: util3d_surface.cpp:1006
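A minimal sketch, assuming `meshA` and `meshB` are existing pcl::TextureMesh::Ptr instances:
  std::list<pcl::TextureMesh::Ptr> meshes;
  meshes.push_back(meshA);
  meshes.push_back(meshB);
  pcl::TextureMesh::Ptr merged = rtabmap::util3d::concatenateTextureMeshes(meshes);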
UTimer
Definition: UTimer.h:46
rtabmap::LaserScan::angleIncrement
float angleIncrement() const
Definition: LaserScan.h:125
epsilon
double epsilon
rtabmap::Memory
Definition: Memory.h:64
scan_step::second
@ second
float
float
rtabmap::util3d::laserScanToPointCloud
pcl::PointCloud< pcl::PointXYZ >::Ptr RTABMAP_CORE_EXPORT laserScanToPointCloud(const LaserScan &laserScan, const Transform &transform=Transform())
Definition: util3d.cpp:2289
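A minimal sketch; with the default identity transform the scan is converted in its own frame (`scan` is an assumed rtabmap::LaserScan). The RGB-normal and normal-only variants listed further below follow the same calling pattern:
  pcl::PointCloud<pcl::PointXYZ>::Ptr xyzCloud =
      rtabmap::util3d::laserScanToPointCloud(scan);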
Eigen::Matrix
distance
Double_ distance(const OrientedPlane3_ &p)
Vector2::x
float x
I
I
rtabmap::util2d::exposureFusion
cv::Mat RTABMAP_CORE_EXPORT exposureFusion(const std::vector< cv::Mat > &images)
Definition: util2d.cpp:2027
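A minimal sketch, assuming two or more aligned exposures of the same view are already loaded:
  std::vector<cv::Mat> exposures; // assumed to hold aligned images of the same scene
  cv::Mat fused = rtabmap::util2d::exposureFusion(exposures);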
rtabmap::LaserScan::hasIntensity
bool hasIntensity() const
Definition: LaserScan.h:136
handle::ptr
PyObject *& ptr()
cols
int cols
rtabmap::util3d::convertPolygonsToPCL
std::vector< pcl::Vertices > RTABMAP_CORE_EXPORT convertPolygonsToPCL(const std::vector< std::vector< RTABMAP_PCL_INDEX > > &polygons)
Definition: util3d_surface.cpp:1249
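A minimal sketch converting raw index polygons (for example, deserialized from the database) into pcl::Vertices; `rawPolygons` is a placeholder name:
  std::vector<std::vector<RTABMAP_PCL_INDEX> > rawPolygons; // assumed filled elsewhere
  std::vector<pcl::Vertices> pclPolygons = rtabmap::util3d::convertPolygonsToPCL(rawPolygons);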
rtabmap::util3d::fixTextureMeshForVisualization
void RTABMAP_CORE_EXPORT fixTextureMeshForVisualization(pcl::TextureMesh &textureMesh)
Definition: util3d_surface.cpp:2262
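A minimal sketch; the mesh is modified in place before being handed to a PCL visualizer (`textureMesh` is an assumed pcl::TextureMesh::Ptr):
  rtabmap::util3d::fixTextureMeshForVisualization(*textureMesh);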
dist
dist
pcl::TextureMapping::setMaxAngle
void setMaxAngle(float maxAngle)
Definition: texture_mapping.h:187
t
Point2 t(10, 10)
rtabmap::util3d::laserScanToPointCloudRGBNormal
pcl::PointCloud< pcl::PointXYZRGBNormal >::Ptr RTABMAP_CORE_EXPORT laserScanToPointCloudRGBNormal(const LaserScan &laserScan, const Transform &transform=Transform(), unsigned char r=100, unsigned char g=100, unsigned char b=100)
Definition: util3d.cpp:2396
UFile.h
dst
char * dst
Definition: lz4.h:354
rtabmap
Definition: CameraARCore.cpp:35
UDirectory
Definition: UDirectory.h:34
pcl::texture_mapping::CameraVector
std::vector< Camera, Eigen::aligned_allocator< Camera > > CameraVector
Definition: texture_mapping.h:96
rtabmap::util3d::laserScanToPointCloudNormal
pcl::PointCloud< pcl::PointNormal >::Ptr RTABMAP_CORE_EXPORT laserScanToPointCloudNormal(const LaserScan &laserScan, const Transform &transform=Transform())
Definition: util3d.cpp:2316
UERROR
#define UERROR(...)
rtabmap::LaserScan::angleMin
float angleMin() const
Definition: LaserScan.h:123
pcl::OrganizedFastMesh::setTrianglePixelSize
void setTrianglePixelSize(int triangle_size)
Set the edge length (in pixels) used for constructing the fixed mesh.
Definition: organized_fast_mesh.h:139
rtabmap::util3d::computeFastOrganizedNormals2D
pcl::PointCloud< pcl::Normal >::Ptr RTABMAP_CORE_EXPORT computeFastOrganizedNormals2D(const pcl::PointCloud< pcl::PointXYZ >::Ptr &cloud, int searchK=5, float searchRadius=0.0f, const Eigen::Vector3f &viewPoint=Eigen::Vector3f(0, 0, 0))
Definition: util3d_surface.cpp:3102
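A minimal sketch for a 2D scan already converted to a point cloud; the default origin view point and default radius are used (`scanCloud` is an assumed pcl::PointCloud<pcl::PointXYZ>::Ptr):
  pcl::PointCloud<pcl::Normal>::Ptr normals =
      rtabmap::util3d::computeFastOrganizedNormals2D(scanCloud, 5 /*searchK*/);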
trace.model
model
Definition: trace.py:4
glm::inverse
GLM_FUNC_DECL matType< T, P > inverse(matType< T, P > const &m)
fo
fo
value
value
i
int i
uIsFinite
bool uIsFinite(const T &value)
Definition: UMath.h:53
result
RESULT & result
rtabmap::LaserScan::rangeMin
float rangeMin() const
Definition: LaserScan.h:121
rtabmap::util3d::adjustNormalsToViewPointImpl
void adjustNormalsToViewPointImpl(typename pcl::PointCloud< PointNormalT >::Ptr &cloud, const Eigen::Vector3f &viewpoint, float groundNormalsUp)
Definition: util3d_surface.cpp:3623
rtabmap::util3d::filterCloseVerticesFromMesh
std::vector< pcl::Vertices > RTABMAP_CORE_EXPORT filterCloseVerticesFromMesh(const pcl::PointCloud< pcl::PointXYZRGBNormal >::Ptr cloud, const std::vector< pcl::Vertices > &polygons, float radius, float angle, bool keepLatestInRadius)
Definition: util3d_surface.cpp:458
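A hedged sketch with illustrative thresholds; `cloud` and `polygons` are assumed outputs of an earlier meshing step:
  std::vector<pcl::Vertices> filtered = rtabmap::util3d::filterCloseVerticesFromMesh(
      cloud, polygons,
      0.005f,      // radius in meters (illustrative value)
      M_PI / 4.0f, // angle threshold on normals (illustrative value)
      true);       // keepLatestInRadius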
v2
v2


rtabmap  Author(s): Mathieu Labbe  autogenerated on Mon Apr 28 2025 02:46:07