util3d_surface.cpp
1 /*
2 Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
3 All rights reserved.
4 
5 Redistribution and use in source and binary forms, with or without
6 modification, are permitted provided that the following conditions are met:
7  * Redistributions of source code must retain the above copyright
8  notice, this list of conditions and the following disclaimer.
9  * Redistributions in binary form must reproduce the above copyright
10  notice, this list of conditions and the following disclaimer in the
11  documentation and/or other materials provided with the distribution.
12  * Neither the name of the Universite de Sherbrooke nor the
13  names of its contributors may be used to endorse or promote products
14  derived from this software without specific prior written permission.
15 
16 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17 ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
20 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
21 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
22 LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
23 ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */
27 
31 #include "rtabmap/core/util3d.h"
32 #include "rtabmap/core/util2d.h"
33 #include "rtabmap/core/Memory.h"
34 #include "rtabmap/core/DBDriver.h"
38 #include "rtabmap/utilite/UFile.h"
40 #include "rtabmap/utilite/UMath.h"
41 #include "rtabmap/utilite/UTimer.h"
42 #include <opencv2/core/core_c.h>
43 #include <opencv2/imgproc/types_c.h>
44 #include <pcl/search/kdtree.h>
45 #include <pcl/surface/gp3.h>
46 #include <pcl/features/normal_3d_omp.h>
47 #include <pcl/surface/mls.h>
49 #include <pcl/features/integral_image_normal.h>
50 
51 #ifdef RTABMAP_ALICE_VISION
52 #include <aliceVision/sfmData/SfMData.hpp>
53 #include <aliceVision/sfmDataIO/sfmDataIO.hpp>
54 #include <aliceVision/mesh/Mesh.hpp>
55 #include <aliceVision/mesh/Texturing.hpp>
56 #include <aliceVision/camera/Pinhole.hpp>
57 #include <boost/algorithm/string.hpp>
58 using namespace aliceVision;
59 #endif
60 
61 #ifndef DISABLE_VTK
62 #include <pcl/surface/vtk_smoothing/vtk_mesh_quadric_decimation.h>
63 #endif
64 
65 #if PCL_VERSION_COMPARE(>, 1, 11, 1)
66 #include <pcl/types.h>
67 #endif
68 #if PCL_VERSION_COMPARE(<, 1, 8, 0)
70 #else
71 #include <pcl/surface/organized_fast_mesh.h>
72 #include <pcl/surface/impl/marching_cubes.hpp>
73 #include <pcl/surface/impl/organized_fast_mesh.hpp>
74 #include <pcl/impl/instantiate.hpp>
75 #include <pcl/point_types.h>
76 
77 // Instantiations of specific point types
78 PCL_INSTANTIATE(OrganizedFastMesh, (pcl::PointXYZRGBNormal))
79 
80 #include <pcl/features/impl/normal_3d_omp.hpp>
81 #if PCL_VERSION_COMPARE(<=, 1, 8, 0)
82 #ifdef PCL_ONLY_CORE_POINT_TYPES
83 PCL_INSTANTIATE_PRODUCT(NormalEstimationOMP, ((pcl::PointXYZRGB))((pcl::Normal)))
84 #endif
85 #endif
86 #endif
87 
88 namespace rtabmap
89 {
90 
91 namespace util3d
92 {
93 
94 void createPolygonIndexes(
95  const std::vector<pcl::Vertices> & polygons,
96  int cloudSize,
97  std::vector<std::set<int> > & neighbors,
98  std::vector<std::set<int> > & vertexToPolygons)
99 {
100  vertexToPolygons = std::vector<std::set<int> >(cloudSize);
101  neighbors = std::vector<std::set<int> >(polygons.size());
102 
103  for(unsigned int i=0; i<polygons.size(); ++i)
104  {
105  std::set<int> vertices(polygons[i].vertices.begin(), polygons[i].vertices.end());
106 
107  for(unsigned int j=0; j<polygons[i].vertices.size(); ++j)
108  {
109  int v = polygons[i].vertices.at(j);
110  for(std::set<int>::iterator iter=vertexToPolygons[v].begin(); iter!=vertexToPolygons[v].end(); ++iter)
111  {
112  int numSharedVertices = 0;
113  for(unsigned int k=0; k<polygons.at(*iter).vertices.size() && numSharedVertices<2; ++k)
114  {
115  if(vertices.find(polygons.at(*iter).vertices.at(k)) != vertices.end())
116  {
117  ++numSharedVertices;
118  }
119  }
120  if(numSharedVertices >= 2)
121  {
122  neighbors[*iter].insert(i);
123  neighbors[i].insert(*iter);
124  }
125  }
126  vertexToPolygons[v].insert(i);
127  }
128  }
129 }
130 
131 std::list<std::list<int> > clusterPolygons(
132  const std::vector<std::set<int> > & neighborPolygons,
133  int minClusterSize)
134 {
135  std::set<int> polygonsChecked;
136 
137  std::list<std::list<int> > clusters;
138 
139  for(unsigned int i=0; i<neighborPolygons.size(); ++i)
140  {
141  if(polygonsChecked.find(i) == polygonsChecked.end())
142  {
143  std::list<int> currentCluster;
144  currentCluster.push_back(i);
145  polygonsChecked.insert(i);
146 
147  for(std::list<int>::iterator iter=currentCluster.begin(); iter!=currentCluster.end(); ++iter)
148  {
149  // get neighbor polygons
150  std::set<int> neighbors = neighborPolygons[*iter];
151  for(std::set<int>::iterator jter=neighbors.begin(); jter!=neighbors.end(); ++jter)
152  {
153  if(polygonsChecked.insert(*jter).second)
154  {
155  currentCluster.push_back(*jter);
156  }
157  }
158  }
159  if((int)currentCluster.size() > minClusterSize)
160  {
161  clusters.push_back(currentCluster);
162  }
163  }
164  }
165  return clusters;
166 }
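/* Example usage (illustrative sketch; `cloud` and `polygons` are assumed to come
 * from a prior reconstruction step): build the polygon adjacency with
 * createPolygonIndexes(), then keep only clusters of connected polygons larger
 * than a threshold with clusterPolygons().
 *
 *   std::vector<std::set<int> > neighbors;
 *   std::vector<std::set<int> > vertexToPolygons;
 *   rtabmap::util3d::createPolygonIndexes(polygons, (int)cloud->size(), neighbors, vertexToPolygons);
 *   // keep only clusters of more than 50 connected polygons
 *   std::list<std::list<int> > clusters = rtabmap::util3d::clusterPolygons(neighbors, 50);
 */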
167 
168 std::vector<pcl::Vertices> organizedFastMesh(
169  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
170  double angleTolerance,
171  bool quad,
172  int trianglePixelSize,
173  const Eigen::Vector3f & viewpoint)
174 {
175  UDEBUG("size=%d angle=%f quad=%d triangleSize=%d", (int)cloud->size(), angleTolerance, quad?1:0, trianglePixelSize);
176  UASSERT(cloud->is_dense == false);
177  UASSERT(cloud->width > 1 && cloud->height > 1);
178 
179  pcl::OrganizedFastMesh<pcl::PointXYZ> ofm;
180  ofm.setTrianglePixelSize (trianglePixelSize);
181  ofm.setTriangulationType (quad?pcl::OrganizedFastMesh<pcl::PointXYZ>::QUAD_MESH:pcl::OrganizedFastMesh<pcl::PointXYZ>::TRIANGLE_ADAPTIVE_CUT);
182  ofm.setInputCloud (cloud);
183  ofm.setAngleTolerance(angleTolerance);
184  ofm.setViewpoint(viewpoint);
185 
186  std::vector<pcl::Vertices> vertices;
187  ofm.reconstruct (vertices);
188 
189  if(quad)
190  {
191  //flip all polygons (right handed)
192  std::vector<pcl::Vertices> output(vertices.size());
193  for(unsigned int i=0; i<vertices.size(); ++i)
194  {
195  output[i].vertices.resize(4);
196  output[i].vertices[0] = vertices[i].vertices[0];
197  output[i].vertices[3] = vertices[i].vertices[1];
198  output[i].vertices[2] = vertices[i].vertices[2];
199  output[i].vertices[1] = vertices[i].vertices[3];
200  }
201  return output;
202  }
203 
204  return vertices;
205 }
206 std::vector<pcl::Vertices> organizedFastMesh(
207  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
208  double angleTolerance,
209  bool quad,
210  int trianglePixelSize,
211  const Eigen::Vector3f & viewpoint)
212 {
213  UDEBUG("size=%d angle=%f quad=%d triangleSize=%d", (int)cloud->size(), angleTolerance, quad?1:0, trianglePixelSize);
214  UASSERT(cloud->is_dense == false);
215  UASSERT(cloud->width > 1 && cloud->height > 1);
216 
217  pcl::OrganizedFastMesh<pcl::PointXYZRGB> ofm;
218  ofm.setTrianglePixelSize (trianglePixelSize);
219  ofm.setTriangulationType (quad?pcl::OrganizedFastMesh<pcl::PointXYZRGB>::QUAD_MESH:pcl::OrganizedFastMesh<pcl::PointXYZRGB>::TRIANGLE_ADAPTIVE_CUT);
220  ofm.setInputCloud (cloud);
221  ofm.setAngleTolerance(angleTolerance);
222  ofm.setViewpoint(viewpoint);
223 
224  std::vector<pcl::Vertices> vertices;
225  ofm.reconstruct (vertices);
226 
227  if(quad)
228  {
229  //flip all polygons (right handed)
230  std::vector<pcl::Vertices> output(vertices.size());
231  for(unsigned int i=0; i<vertices.size(); ++i)
232  {
233  output[i].vertices.resize(4);
234  output[i].vertices[0] = vertices[i].vertices[0];
235  output[i].vertices[3] = vertices[i].vertices[1];
236  output[i].vertices[2] = vertices[i].vertices[2];
237  output[i].vertices[1] = vertices[i].vertices[3];
238  }
239  return output;
240  }
241 
242  return vertices;
243 }
244 std::vector<pcl::Vertices> organizedFastMesh(
245  const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloud,
246  double angleTolerance,
247  bool quad,
248  int trianglePixelSize,
249  const Eigen::Vector3f & viewpoint)
250 {
251  UDEBUG("size=%d angle=%f quad=%d triangleSize=%d", (int)cloud->size(), angleTolerance, quad?1:0, trianglePixelSize);
252  UASSERT(cloud->is_dense == false);
253  UASSERT(cloud->width > 1 && cloud->height > 1);
254 
255  pcl::OrganizedFastMesh<pcl::PointXYZRGBNormal> ofm;
256  ofm.setTrianglePixelSize (trianglePixelSize);
257  ofm.setTriangulationType (quad?pcl::OrganizedFastMesh<pcl::PointXYZRGBNormal>::QUAD_MESH:pcl::OrganizedFastMesh<pcl::PointXYZRGBNormal>::TRIANGLE_ADAPTIVE_CUT);
258  ofm.setInputCloud (cloud);
259  ofm.setAngleTolerance(angleTolerance);
260  ofm.setViewpoint(viewpoint);
261 
262  std::vector<pcl::Vertices> vertices;
263  ofm.reconstruct (vertices);
264 
265  if(quad)
266  {
267  //flip all polygons (right handed)
268  std::vector<pcl::Vertices> output(vertices.size());
269  for(unsigned int i=0; i<vertices.size(); ++i)
270  {
271  output[i].vertices.resize(4);
272  output[i].vertices[0] = vertices[i].vertices[0];
273  output[i].vertices[3] = vertices[i].vertices[1];
274  output[i].vertices[2] = vertices[i].vertices[2];
275  output[i].vertices[1] = vertices[i].vertices[3];
276  }
277  return output;
278  }
279 
280  return vertices;
281 }
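/* Example usage (illustrative sketch; `cloud` and `viewpoint` are assumed to be
 * provided by the caller): meshing an organized RGB-D cloud. The cloud must keep
 * its 2D organization (width/height > 1) and contain NaNs for invalid pixels
 * (is_dense == false).
 *
 *   std::vector<pcl::Vertices> polygons = rtabmap::util3d::organizedFastMesh(
 *       cloud,       // pcl::PointCloud<pcl::PointXYZRGB>::Ptr, organized
 *       M_PI/16,     // angle tolerance (rad)
 *       false,       // triangles instead of quads
 *       2,           // triangle pixel size
 *       viewpoint);  // Eigen::Vector3f sensor viewpoint
 */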
282 
283 void appendMesh(
284  pcl::PointCloud<pcl::PointXYZRGBNormal> & cloudA,
285  std::vector<pcl::Vertices> & polygonsA,
286  const pcl::PointCloud<pcl::PointXYZRGBNormal> & cloudB,
287  const std::vector<pcl::Vertices> & polygonsB)
288 {
289  UDEBUG("cloudA=%d polygonsA=%d cloudB=%d polygonsB=%d", (int)cloudA.size(), (int)polygonsA.size(), (int)cloudB.size(), (int)polygonsB.size());
290  UASSERT(!cloudA.isOrganized() && !cloudB.isOrganized());
291 
292  int sizeA = (int)cloudA.size();
293  cloudA += cloudB;
294 
295  int sizePolygonsA = (int)polygonsA.size();
296  polygonsA.resize(sizePolygonsA+polygonsB.size());
297 
298  for(unsigned int i=0; i<polygonsB.size(); ++i)
299  {
300  pcl::Vertices vertices = polygonsB[i];
301  for(unsigned int j=0; j<vertices.vertices.size(); ++j)
302  {
303  vertices.vertices[j] += sizeA;
304  }
305  polygonsA[i+sizePolygonsA] = vertices;
306  }
307 }
308 
309 void appendMesh(
310  pcl::PointCloud<pcl::PointXYZRGB> & cloudA,
311  std::vector<pcl::Vertices> & polygonsA,
312  const pcl::PointCloud<pcl::PointXYZRGB> & cloudB,
313  const std::vector<pcl::Vertices> & polygonsB)
314 {
315  UDEBUG("cloudA=%d polygonsA=%d cloudB=%d polygonsB=%d", (int)cloudA.size(), (int)polygonsA.size(), (int)cloudB.size(), (int)polygonsB.size());
316  UASSERT(!cloudA.isOrganized() && !cloudB.isOrganized());
317 
318  int sizeA = (int)cloudA.size();
319  cloudA += cloudB;
320 
321  int sizePolygonsA = (int)polygonsA.size();
322  polygonsA.resize(sizePolygonsA+polygonsB.size());
323 
324  for(unsigned int i=0; i<polygonsB.size(); ++i)
325  {
326  pcl::Vertices vertices = polygonsB[i];
327  for(unsigned int j=0; j<vertices.vertices.size(); ++j)
328  {
329  vertices.vertices[j] += sizeA;
330  }
331  polygonsA[i+sizePolygonsA] = vertices;
332  }
333 }
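/* Example usage (illustrative sketch; `cloudA`, `polygonsA`, `cloudB` and
 * `polygonsB` are assumed to exist): merge two dense (non-organized) meshes in
 * place. Vertex indices of the appended polygons are shifted by the original
 * size of cloudA.
 *
 *   rtabmap::util3d::appendMesh(*cloudA, polygonsA, *cloudB, polygonsB);
 *   // cloudA/polygonsA now contain the concatenated mesh
 */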
334 
335 std::vector<int> filterNotUsedVerticesFromMesh(
336  const pcl::PointCloud<pcl::PointXYZRGBNormal> & cloud,
337  const std::vector<pcl::Vertices> & polygons,
338  pcl::PointCloud<pcl::PointXYZRGBNormal> & outputCloud,
339  std::vector<pcl::Vertices> & outputPolygons)
340 {
341  UDEBUG("size=%d polygons=%d", (int)cloud.size(), (int)polygons.size());
342  std::map<int, int> addedVertices; //<oldIndex, newIndex>
343  std::vector<int> output; //<oldIndex>
344  output.resize(cloud.size());
345  outputCloud.resize(cloud.size());
346  outputCloud.is_dense = true;
347  outputPolygons.resize(polygons.size());
348  int oi = 0;
349  for(unsigned int i=0; i<polygons.size(); ++i)
350  {
351  pcl::Vertices & v = outputPolygons[i];
352  v.vertices.resize(polygons[i].vertices.size());
353  for(unsigned int j=0; j<polygons[i].vertices.size(); ++j)
354  {
355  std::map<int, int>::iterator iter = addedVertices.find(polygons[i].vertices[j]);
356  if(iter == addedVertices.end())
357  {
358  outputCloud[oi] = cloud.at(polygons[i].vertices[j]);
359  addedVertices.insert(std::make_pair(polygons[i].vertices[j], oi));
360  output[oi] = polygons[i].vertices[j];
361  v.vertices[j] = oi++;
362  }
363  else
364  {
365  v.vertices[j] = iter->second;
366  }
367  }
368  }
369  outputCloud.resize(oi);
370  output.resize(oi);
371 
372  return output;
373 }
374 
375 std::vector<int> filterNotUsedVerticesFromMesh(
376  const pcl::PointCloud<pcl::PointXYZRGB> & cloud,
377  const std::vector<pcl::Vertices> & polygons,
378  pcl::PointCloud<pcl::PointXYZRGB> & outputCloud,
379  std::vector<pcl::Vertices> & outputPolygons)
380 {
381  UDEBUG("size=%d polygons=%d", (int)cloud.size(), (int)polygons.size());
382  std::map<int, int> addedVertices; //<oldIndex, newIndex>
383  std::vector<int> output; //<oldIndex>
384  output.resize(cloud.size());
385  outputCloud.resize(cloud.size());
386  outputCloud.is_dense = true;
387  outputPolygons.resize(polygons.size());
388  int oi = 0;
389  for(unsigned int i=0; i<polygons.size(); ++i)
390  {
391  pcl::Vertices & v = outputPolygons[i];
392  v.vertices.resize(polygons[i].vertices.size());
393  for(unsigned int j=0; j<polygons[i].vertices.size(); ++j)
394  {
395  std::map<int, int>::iterator iter = addedVertices.find(polygons[i].vertices[j]);
396  if(iter == addedVertices.end())
397  {
398  outputCloud[oi] = cloud.at(polygons[i].vertices[j]);
399  addedVertices.insert(std::make_pair(polygons[i].vertices[j], oi));
400  output[oi] = polygons[i].vertices[j];
401  v.vertices[j] = oi++;
402  }
403  else
404  {
405  v.vertices[j] = iter->second;
406  }
407  }
408  }
409  outputCloud.resize(oi);
410  output.resize(oi);
411 
412  return output;
413 }
414 
415 std::vector<int> filterNaNPointsFromMesh(
416  const pcl::PointCloud<pcl::PointXYZRGB> & cloud,
417  const std::vector<pcl::Vertices> & polygons,
418  pcl::PointCloud<pcl::PointXYZRGB> & outputCloud,
419  std::vector<pcl::Vertices> & outputPolygons)
420 {
421  UDEBUG("size=%d polygons=%d", (int)cloud.size(), (int)polygons.size());
422  std::map<int, int> addedVertices; //<oldIndex, newIndex>
423  std::vector<int> output; //<oldIndex>
424  output.resize(cloud.size());
425  outputCloud.resize(cloud.size());
426  outputCloud.is_dense = true;
427  std::vector<int> organizedToDense(cloud.size(), -1);
428 
429  int oi = 0;
430  for(unsigned int i=0; i<cloud.size(); ++i)
431  {
432  if(pcl::isFinite(cloud.at(i)))
433  {
434  outputCloud.at(oi) = cloud.at(i);
435  output[oi] = i;
436  organizedToDense[i] = oi;
437  ++oi;
438  }
439  }
440  outputCloud.resize(oi);
441  output.resize(oi);
442 
443  // remap polygons to dense cloud
444  outputPolygons = polygons;
445  for(unsigned int i=0; i<outputPolygons.size(); ++i)
446  {
447  pcl::Vertices & v = outputPolygons[i];
448  for(unsigned int j=0; j<v.vertices.size(); ++j)
449  {
450  UASSERT(organizedToDense[v.vertices[j]] >= 0);
451  v.vertices[j] = organizedToDense[v.vertices[j]];
452  }
453  }
454 
455  return output;
456 }
457 
458 std::vector<pcl::Vertices> filterCloseVerticesFromMesh(
459  const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud,
460  const std::vector<pcl::Vertices> & polygons,
461  float radius,
462  float angle, // FIXME angle not used
463  bool keepLatestInRadius)
464 {
465  UDEBUG("size=%d polygons=%d radius=%f angle=%f keepLatest=%d",
466  (int)cloud->size(), (int)polygons.size(), radius, angle, keepLatestInRadius?1:0);
467  std::vector<pcl::Vertices> outputPolygons;
468  pcl::KdTreeFLANN<pcl::PointXYZRGBNormal>::Ptr kdtree(new pcl::KdTreeFLANN<pcl::PointXYZRGBNormal>);
469  kdtree->setInputCloud(cloud);
470 
471  std::map<int, int> verticesDone;
472  outputPolygons = polygons;
473  for(unsigned int i=0; i<outputPolygons.size(); ++i)
474  {
475  pcl::Vertices & polygon = outputPolygons[i];
476  for(unsigned int j=0; j<polygon.vertices.size(); ++j)
477  {
478  std::map<int, int>::iterator iter = verticesDone.find(polygon.vertices[j]);
479  if(iter != verticesDone.end())
480  {
481  polygon.vertices[j] = iter->second;
482  }
483  else
484  {
485  std::vector<int> kIndices;
486  std::vector<float> kDistances;
487  kdtree->radiusSearch(polygon.vertices[j], radius, kIndices, kDistances);
488  if(kIndices.size())
489  {
490  int reference = -1;
491  for(unsigned int z=0; z<kIndices.size(); ++z)
492  {
493  if(reference == -1)
494  {
495  reference = kIndices[z];
496  }
497  else if(keepLatestInRadius)
498  {
499  if(kIndices[z] < reference)
500  {
501  reference = kIndices[z];
502  }
503  }
504  else
505  {
506  if(kIndices[z] > reference)
507  {
508  reference = kIndices[z];
509  }
510  }
511  }
512  if(reference >= 0)
513  {
514  for(unsigned int z=0; z<kIndices.size(); ++z)
515  {
516  verticesDone.insert(std::make_pair(kIndices[z], reference)); // index with z (radius-search loop), not j
517  }
518  polygon.vertices[j] = reference;
519  }
520  }
521  else
522  {
523  verticesDone.insert(std::make_pair(polygon.vertices[j], polygon.vertices[j]));
524  }
525  }
526  }
527  }
528  return outputPolygons;
529 }
530 
531 std::vector<pcl::Vertices> filterInvalidPolygons(const std::vector<pcl::Vertices> & polygons)
532 {
533  std::vector<pcl::Vertices> output(polygons.size());
534  int oi=0;
535  for(unsigned int i=0; i<polygons.size(); ++i)
536  {
537  bool valid = true;
538  for(unsigned int j=0; j<polygons[i].vertices.size(); ++j)
539  {
540  if(polygons[i].vertices[j] == polygons[i].vertices[(j+1)%polygons[i].vertices.size()])
541  {
542  valid = false;
543  break;
544  }
545  }
546  if(valid)
547  {
548  output[oi++] = polygons[i];
549  }
550  }
551  output.resize(oi);
552  return output;
553 }
554 
555 pcl::PolygonMesh::Ptr createMesh(
556  const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloudWithNormals,
557  float gp3SearchRadius,
558  float gp3Mu,
559  int gp3MaximumNearestNeighbors,
560  float gp3MaximumSurfaceAngle,
561  float gp3MinimumAngle,
562  float gp3MaximumAngle,
563  bool gp3NormalConsistency)
564 {
565  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloudWithNormalsNoNaN = removeNaNNormalsFromPointCloud(cloudWithNormals);
566 
567  // Create search tree*
568  pcl::search::KdTree<pcl::PointXYZRGBNormal>::Ptr tree2 (new pcl::search::KdTree<pcl::PointXYZRGBNormal>);
569  tree2->setInputCloud (cloudWithNormalsNoNaN);
570 
571  // Initialize objects
572  pcl::GreedyProjectionTriangulation<pcl::PointXYZRGBNormal> gp3;
573  pcl::PolygonMesh::Ptr mesh(new pcl::PolygonMesh);
574 
575  // Set the maximum distance between connected points (maximum edge length)
576  gp3.setSearchRadius (gp3SearchRadius);
577 
578  // Set typical values for the parameters
579  gp3.setMu (gp3Mu);
580  gp3.setMaximumNearestNeighbors (gp3MaximumNearestNeighbors);
581  gp3.setMaximumSurfaceAngle(gp3MaximumSurfaceAngle); // 45 degrees
582  gp3.setMinimumAngle(gp3MinimumAngle); // 10 degrees
583  gp3.setMaximumAngle(gp3MaximumAngle); // 120 degrees
584  gp3.setNormalConsistency(gp3NormalConsistency);
585  gp3.setConsistentVertexOrdering(gp3NormalConsistency);
586 
587  // Get result
588  gp3.setInputCloud (cloudWithNormalsNoNaN);
589  gp3.setSearchMethod (tree2);
590  gp3.reconstruct (*mesh);
591 
592  //UASSERT(mesh->cloud.data.size()/mesh->cloud.point_step == cloudWithNormalsNoNaN->size());
593  //mesh->polygons = normalizePolygonsSide(*cloudWithNormalsNoNaN, mesh->polygons);
594 
595  return mesh;
596 }
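/* Example usage (illustrative sketch; `cloudWithNormals` is assumed to already
 * contain normals, e.g. computed with pcl::NormalEstimationOMP): greedy
 * projection triangulation. The parameter values below are illustrative, not
 * prescriptive.
 *
 *   pcl::PolygonMesh::Ptr mesh = rtabmap::util3d::createMesh(
 *       cloudWithNormals, // pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr
 *       0.025f,           // gp3SearchRadius (max edge length, m)
 *       2.5f,             // gp3Mu
 *       100,              // gp3MaximumNearestNeighbors
 *       M_PI/4,           // gp3MaximumSurfaceAngle (45 deg)
 *       M_PI/18,          // gp3MinimumAngle (10 deg)
 *       2*M_PI/3,         // gp3MaximumAngle (120 deg)
 *       false);           // gp3NormalConsistency
 */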
597 
598 pcl::texture_mapping::CameraVector createTextureCameras(
599  const std::map<int, Transform> & poses,
600  const std::map<int, std::vector<CameraModel> > & cameraModels,
601  const std::map<int, cv::Mat> & cameraDepths,
602  const std::vector<float> & roiRatios)
603 {
604  UASSERT_MSG(poses.size() == cameraModels.size(), uFormat("%d vs %d", (int)poses.size(), (int)cameraModels.size()).c_str());
605  UASSERT(roiRatios.empty() || roiRatios.size() == 4);
606  pcl::texture_mapping::CameraVector cameras;
607  std::map<int, Transform>::const_iterator poseIter=poses.begin();
608  std::map<int, std::vector<CameraModel> >::const_iterator modelIter=cameraModels.begin();
609  for(; poseIter!=poses.end(); ++poseIter, ++modelIter)
610  {
611  UASSERT(poseIter->first == modelIter->first);
612 
613  std::map<int, cv::Mat>::const_iterator depthIter = cameraDepths.find(poseIter->first);
614 
615  // for each sub camera
616  for(unsigned int i=0; i<modelIter->second.size(); ++i)
617  {
618  pcl::TextureMapping<pcl::PointXYZ>::Camera cam;
619  // should be in camera frame
620  UASSERT(!modelIter->second[i].localTransform().isNull() && !poseIter->second.isNull());
621  Transform t = poseIter->second*modelIter->second[i].localTransform();
622 
623  cam.pose = t.toEigen3f();
624 
625  if(modelIter->second[i].imageHeight() <=0 || modelIter->second[i].imageWidth() <=0)
626  {
627  UERROR("Should have camera models with width/height set to create texture cameras!");
629  }
630 
631  UASSERT(modelIter->second[i].fx()>0 && modelIter->second[i].imageHeight()>0 && modelIter->second[i].imageWidth()>0);
632  cam.focal_length=modelIter->second[i].fx();
633  cam.height=modelIter->second[i].imageHeight();
634  cam.width=modelIter->second[i].imageWidth();
635  if(modelIter->second.size() == 1)
636  {
637  cam.texture_file = uFormat("%d", poseIter->first); // camera index
638  }
639  else
640  {
641  cam.texture_file = uFormat("%d_%d", poseIter->first, (int)i); // camera index, sub camera model index
642  }
643  if(!roiRatios.empty())
644  {
645  cam.roi.resize(4);
646  cam.roi[0] = cam.width * roiRatios[0]; // left -> x
647  cam.roi[1] = cam.height * roiRatios[2]; // top -> y
648  cam.roi[2] = cam.width * (1.0 - roiRatios[1]) - cam.roi[0]; // right -> width
649  cam.roi[3] = cam.height * (1.0 - roiRatios[3]) - cam.roi[1]; // bottom -> height
650  }
651 
652  if(depthIter != cameraDepths.end() && !depthIter->second.empty())
653  {
654  UASSERT(depthIter->second.type() == CV_32FC1 || depthIter->second.type() == CV_16UC1);
655  UASSERT(depthIter->second.cols % modelIter->second.size() == 0);
656  int subWidth = depthIter->second.cols/(modelIter->second.size());
657  cam.depth = cv::Mat(depthIter->second, cv::Range(0, depthIter->second.rows), cv::Range(subWidth*i, subWidth*(i+1)));
658  }
659 
660  UDEBUG("%f", cam.focal_length);
661  UDEBUG("%f", cam.height);
662  UDEBUG("%f", cam.width);
663  UDEBUG("cam.pose=%s", t.prettyPrint().c_str());
664 
665  cameras.push_back(cam);
666  }
667  }
668  return cameras;
669 }
670 
671 pcl::TextureMesh::Ptr createTextureMesh(
672  const pcl::PolygonMesh::Ptr & mesh,
673  const std::map<int, Transform> & poses,
674  const std::map<int, CameraModel> & cameraModels,
675  const std::map<int, cv::Mat> & cameraDepths,
676  float maxDistance,
677  float maxDepthError,
678  float maxAngle,
679  int minClusterSize,
680  const std::vector<float> & roiRatios,
681  const ProgressState * state,
682  std::vector<std::map<int, pcl::PointXY> > * vertexToPixels,
683  bool distanceToCamPolicy)
684 {
685  std::map<int, std::vector<CameraModel> > cameraSubModels;
686  for(std::map<int, CameraModel>::const_iterator iter=cameraModels.begin(); iter!=cameraModels.end(); ++iter)
687  {
688  std::vector<CameraModel> models;
689  models.push_back(iter->second);
690  cameraSubModels.insert(std::make_pair(iter->first, models));
691  }
692 
693  return createTextureMesh(
694  mesh,
695  poses,
696  cameraSubModels,
697  cameraDepths,
698  maxDistance,
699  maxDepthError,
700  maxAngle,
701  minClusterSize,
702  roiRatios,
703  state,
704  vertexToPixels,
705  distanceToCamPolicy);
706 }
707 
708 pcl::TextureMesh::Ptr createTextureMesh(
709  const pcl::PolygonMesh::Ptr & mesh,
710  const std::map<int, Transform> & poses,
711  const std::map<int, std::vector<CameraModel> > & cameraModels,
712  const std::map<int, cv::Mat> & cameraDepths,
713  float maxDistance,
714  float maxDepthError,
715  float maxAngle,
716  int minClusterSize,
717  const std::vector<float> & roiRatios,
718  const ProgressState * state,
719  std::vector<std::map<int, pcl::PointXY> > * vertexToPixels,
720  bool distanceToCamPolicy)
721 {
722  UASSERT(mesh->polygons.size());
723  pcl::TextureMesh::Ptr textureMesh(new pcl::TextureMesh);
724  textureMesh->cloud = mesh->cloud;
725  textureMesh->tex_polygons.push_back(mesh->polygons);
726 
727  // Original from pcl/gpu/kinfu_large_scale/tools/standalone_texture_mapping.cpp:
728  // Author: Raphael Favier, Technical University Eindhoven, (r.mysurname <aT> tue.nl)
729 
730  // Create the texturemesh object that will contain our UV-mapped mesh
731 
732  // create cameras
733  pcl::texture_mapping::CameraVector cameras = createTextureCameras(
734  poses,
735  cameraModels,
736  cameraDepths,
737  roiRatios);
738 
739  // Create materials for each texture (and one extra for occluded faces)
740  textureMesh->tex_materials.resize (cameras.size () + 1);
741  for(unsigned int i = 0 ; i <= cameras.size() ; ++i)
742  {
743  pcl::TexMaterial mesh_material;
744  mesh_material.tex_Ka.r = 0.2f;
745  mesh_material.tex_Ka.g = 0.2f;
746  mesh_material.tex_Ka.b = 0.2f;
747 
748  mesh_material.tex_Kd.r = 0.8f;
749  mesh_material.tex_Kd.g = 0.8f;
750  mesh_material.tex_Kd.b = 0.8f;
751 
752  mesh_material.tex_Ks.r = 1.0f;
753  mesh_material.tex_Ks.g = 1.0f;
754  mesh_material.tex_Ks.b = 1.0f;
755 
756  mesh_material.tex_d = 1.0f;
757  mesh_material.tex_Ns = 75.0f;
758  mesh_material.tex_illum = 2;
759 
760  std::stringstream tex_name;
761  tex_name << "material_" << i;
762  tex_name >> mesh_material.tex_name;
763 
764  if(i < cameras.size ())
765  {
766  mesh_material.tex_file = cameras[i].texture_file;
767  }
768  else
769  {
770  mesh_material.tex_file = "occluded";
771  }
772 
773  textureMesh->tex_materials[i] = mesh_material;
774  }
775 
776  // Texture by projection
777  pcl::TextureMapping<pcl::PointXYZ> tm; // TextureMapping object that will perform the sort
778  tm.setMaxDistance(maxDistance);
779  tm.setMaxAngle(maxAngle);
780  if(maxDepthError > 0.0f)
781  {
782  tm.setMaxDepthError(maxDepthError);
783  }
784  tm.setMinClusterSize(minClusterSize);
785  if(tm.textureMeshwithMultipleCameras2(*textureMesh, cameras, state, vertexToPixels, distanceToCamPolicy))
786  {
787  // compute normals for the mesh if not already here
788  bool hasNormals = false;
789  bool hasColors = false;
790  for(unsigned int i=0; i<textureMesh->cloud.fields.size(); ++i)
791  {
792  if(textureMesh->cloud.fields[i].name.compare("normal_x") == 0)
793  {
794  hasNormals = true;
795  }
796  else if(textureMesh->cloud.fields[i].name.compare("rgb") == 0)
797  {
798  hasColors = true;
799  }
800  }
801  if(!hasNormals)
802  {
803  // use polygons
804  if(hasColors)
805  {
806  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZRGBNormal>);
807  pcl::fromPCLPointCloud2(mesh->cloud, *cloud);
808 
809  for(unsigned int i=0; i<mesh->polygons.size(); ++i)
810  {
811  pcl::Vertices & v = mesh->polygons[i];
812  UASSERT(v.vertices.size()>2);
813  Eigen::Vector3f v0(
814  cloud->at(v.vertices[1]).x - cloud->at(v.vertices[0]).x,
815  cloud->at(v.vertices[1]).y - cloud->at(v.vertices[0]).y,
816  cloud->at(v.vertices[1]).z - cloud->at(v.vertices[0]).z);
817  int last = v.vertices.size()-1;
818  Eigen::Vector3f v1(
819  cloud->at(v.vertices[last]).x - cloud->at(v.vertices[0]).x,
820  cloud->at(v.vertices[last]).y - cloud->at(v.vertices[0]).y,
821  cloud->at(v.vertices[last]).z - cloud->at(v.vertices[0]).z);
822  Eigen::Vector3f normal = v0.cross(v1);
823  normal.normalize();
824  // flat normal (per face)
825  for(unsigned int j=0; j<v.vertices.size(); ++j)
826  {
827  cloud->at(v.vertices[j]).normal_x = normal[0];
828  cloud->at(v.vertices[j]).normal_y = normal[1];
829  cloud->at(v.vertices[j]).normal_z = normal[2];
830  }
831  }
832  pcl::toPCLPointCloud2 (*cloud, textureMesh->cloud);
833  }
834  else
835  {
836  pcl::PointCloud<pcl::PointNormal>::Ptr cloud (new pcl::PointCloud<pcl::PointNormal>);
837  pcl::fromPCLPointCloud2(mesh->cloud, *cloud);
838 
839  for(unsigned int i=0; i<mesh->polygons.size(); ++i)
840  {
841  pcl::Vertices & v = mesh->polygons[i];
842  UASSERT(v.vertices.size()>2);
843  Eigen::Vector3f v0(
844  cloud->at(v.vertices[1]).x - cloud->at(v.vertices[0]).x,
845  cloud->at(v.vertices[1]).y - cloud->at(v.vertices[0]).y,
846  cloud->at(v.vertices[1]).z - cloud->at(v.vertices[0]).z);
847  int last = v.vertices.size()-1;
848  Eigen::Vector3f v1(
849  cloud->at(v.vertices[last]).x - cloud->at(v.vertices[0]).x,
850  cloud->at(v.vertices[last]).y - cloud->at(v.vertices[0]).y,
851  cloud->at(v.vertices[last]).z - cloud->at(v.vertices[0]).z);
852  Eigen::Vector3f normal = v0.cross(v1);
853  normal.normalize();
854  // flat normal (per face)
855  for(unsigned int j=0; j<v.vertices.size(); ++j)
856  {
857  cloud->at(v.vertices[j]).normal_x = normal[0];
858  cloud->at(v.vertices[j]).normal_y = normal[1];
859  cloud->at(v.vertices[j]).normal_z = normal[2];
860  }
861  }
862  pcl::toPCLPointCloud2 (*cloud, textureMesh->cloud);
863  }
864  }
865  }
866  return textureMesh;
867 }
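/* Example usage (illustrative sketch; `mesh`, `poses`, `models` and `depths` are
 * assumed to be provided by the caller, indexed by node id as elsewhere in this
 * file): project camera images onto a reconstructed mesh and keep the per-vertex
 * pixel mapping for later texture merging.
 *
 *   std::vector<std::map<int, pcl::PointXY> > vertexToPixels;
 *   pcl::TextureMesh::Ptr textureMesh = rtabmap::util3d::createTextureMesh(
 *       mesh,                  // pcl::PolygonMesh::Ptr
 *       poses,                 // std::map<int, Transform>
 *       models,                // std::map<int, CameraModel>
 *       depths,                // std::map<int, cv::Mat>, may be empty
 *       4.0f,                  // maxDistance (m)
 *       0.0f,                  // maxDepthError
 *       0.0f,                  // maxAngle (rad)
 *       50,                    // minClusterSize
 *       std::vector<float>(),  // roiRatios
 *       0,                     // ProgressState*
 *       &vertexToPixels,       // optional output
 *       false);                // distanceToCamPolicy
 */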
868 
869 void cleanTextureMesh(
870  pcl::TextureMesh & textureMesh,
871  int minClusterSize)
872 {
873  UDEBUG("minClusterSize=%d", minClusterSize);
874  // Remove occluded polygons (polygons with no texture)
875  if(textureMesh.tex_coordinates.size())
876  {
877  // assume last texture is the occluded texture
878  textureMesh.tex_coordinates.pop_back();
879  textureMesh.tex_polygons.pop_back();
880  textureMesh.tex_materials.pop_back();
881 
882  if(minClusterSize!=0)
883  {
884  // concatenate all polygons
885  unsigned int totalSize = 0;
886  for(unsigned int t=0; t<textureMesh.tex_polygons.size(); ++t)
887  {
888  totalSize+=textureMesh.tex_polygons[t].size();
889  }
890  std::vector<pcl::Vertices> allPolygons(totalSize);
891  int oi=0;
892  for(unsigned int t=0; t<textureMesh.tex_polygons.size(); ++t)
893  {
894  for(unsigned int i=0; i<textureMesh.tex_polygons[t].size(); ++i)
895  {
896  allPolygons[oi++] = textureMesh.tex_polygons[t][i];
897  }
898  }
899 
900  // filter polygons
901  std::vector<std::set<int> > neighbors;
902  std::vector<std::set<int> > vertexToPolygons;
903  util3d::createPolygonIndexes(allPolygons,
904  (int)textureMesh.cloud.data.size()/textureMesh.cloud.point_step,
905  neighbors,
906  vertexToPolygons);
907 
908  std::list<std::list<int> > clusters = util3d::clusterPolygons(
909  neighbors,
910  minClusterSize<0?0:minClusterSize);
911 
912  std::set<int> validPolygons;
913  if(minClusterSize < 0)
914  {
915  // only keep the biggest cluster
916  std::list<std::list<int> >::iterator biggestClusterIndex = clusters.end();
917  unsigned int biggestClusterSize = 0;
918  for(std::list<std::list<int> >::iterator iter=clusters.begin(); iter!=clusters.end(); ++iter)
919  {
920  if(iter->size() > biggestClusterSize)
921  {
922  biggestClusterIndex = iter;
923  biggestClusterSize = iter->size();
924  }
925  }
926  if(biggestClusterIndex != clusters.end())
927  {
928  for(std::list<int>::iterator jter=biggestClusterIndex->begin(); jter!=biggestClusterIndex->end(); ++jter)
929  {
930  validPolygons.insert(*jter);
931  }
932  }
933  }
934  else
935  {
936  for(std::list<std::list<int> >::iterator iter=clusters.begin(); iter!=clusters.end(); ++iter)
937  {
938  for(std::list<int>::iterator jter=iter->begin(); jter!=iter->end(); ++jter)
939  {
940  validPolygons.insert(*jter);
941  }
942  }
943  }
944 
945  if(validPolygons.size() == 0)
946  {
947  UWARN("All %d polygons filtered after polygon cluster filtering. Cluster minimum size is %d.",totalSize, minClusterSize);
948  }
949 
950  // for each texture
951  unsigned int allPolygonsIndex = 0;
952  for(unsigned int t=0; t<textureMesh.tex_polygons.size(); ++t)
953  {
954  std::vector<pcl::Vertices> filteredPolygons(textureMesh.tex_polygons[t].size());
955 #if PCL_VERSION_COMPARE(>=, 1, 8, 0)
956  std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > filteredCoordinates(textureMesh.tex_coordinates[t].size());
957 #else
958  std::vector<Eigen::Vector2f> filteredCoordinates(textureMesh.tex_coordinates[t].size());
959 #endif
960 
961  if(textureMesh.tex_polygons[t].size())
962  {
963  UASSERT_MSG(allPolygonsIndex < allPolygons.size(), uFormat("%d vs %d", (int)allPolygonsIndex, (int)allPolygons.size()).c_str());
964 
965  // make index polygon to coordinate
966  std::vector<unsigned int> polygonToCoord(textureMesh.tex_polygons[t].size());
967  unsigned int totalCoord = 0;
968  for(unsigned int i=0; i<textureMesh.tex_polygons[t].size(); ++i)
969  {
970  polygonToCoord[i] = totalCoord;
971  totalCoord+=textureMesh.tex_polygons[t][i].vertices.size();
972  }
973  UASSERT_MSG(totalCoord == textureMesh.tex_coordinates[t].size(), uFormat("%d vs %d", totalCoord, (int)textureMesh.tex_coordinates[t].size()).c_str());
974 
975  int oi=0;
976  int ci=0;
977  for(unsigned int i=0; i<textureMesh.tex_polygons[t].size(); ++i)
978  {
979  if(validPolygons.find(allPolygonsIndex) != validPolygons.end())
980  {
981  filteredPolygons[oi] = textureMesh.tex_polygons[t].at(i);
982  for(unsigned int j=0; j<filteredPolygons[oi].vertices.size(); ++j)
983  {
984  UASSERT(polygonToCoord[i] < textureMesh.tex_coordinates[t].size());
985  filteredCoordinates[ci] = textureMesh.tex_coordinates[t][polygonToCoord[i]+j];
986  ++ci;
987  }
988  ++oi;
989  }
990  ++allPolygonsIndex;
991  }
992  filteredPolygons.resize(oi);
993  filteredCoordinates.resize(ci);
994  textureMesh.tex_polygons[t] = filteredPolygons;
995  textureMesh.tex_coordinates[t] = filteredCoordinates;
996  }
997  }
998  }
999  }
1000 }
1001 
1002 pcl::TextureMesh::Ptr concatenateTextureMeshes(const std::list<pcl::TextureMesh::Ptr> & meshes)
1003 {
1004  pcl::TextureMesh::Ptr output(new pcl::TextureMesh);
1005  std::map<std::string, int> addedMaterials; //<file, index>
1006  for(std::list<pcl::TextureMesh::Ptr>::const_iterator iter = meshes.begin(); iter!=meshes.end(); ++iter)
1007  {
1008  if((*iter)->cloud.point_step &&
1009  (*iter)->cloud.data.size()/(*iter)->cloud.point_step &&
1010  (*iter)->tex_polygons.size() &&
1011  (*iter)->tex_coordinates.size())
1012  {
1013  // append point cloud
1014  int polygonStep = output->cloud.height * output->cloud.width;
1015  pcl::PCLPointCloud2 tmp;
1016 #if PCL_VERSION_COMPARE(>=, 1, 10, 0)
1017  pcl::concatenate(output->cloud, iter->get()->cloud, tmp);
1018 #else
1019  pcl::concatenatePointCloud(output->cloud, iter->get()->cloud, tmp);
1020 #endif
1021  output->cloud = tmp;
1022 
1023  UASSERT((*iter)->tex_polygons.size() == (*iter)->tex_coordinates.size() &&
1024  (*iter)->tex_polygons.size() == (*iter)->tex_materials.size());
1025 
1026  int materialCount = (*iter)->tex_polygons.size();
1027  for(int i=0; i<materialCount; ++i)
1028  {
1029  std::map<std::string, int>::iterator jter = addedMaterials.find((*iter)->tex_materials[i].tex_file);
1030  int index;
1031  if(jter != addedMaterials.end())
1032  {
1033  index = jter->second;
1034  }
1035  else
1036  {
1037  addedMaterials.insert(std::make_pair((*iter)->tex_materials[i].tex_file, output->tex_materials.size()));
1038  index = output->tex_materials.size();
1039  output->tex_materials.push_back((*iter)->tex_materials[i]);
1040  output->tex_materials.back().tex_name = uFormat("material_%d", index);
1041  output->tex_polygons.resize(output->tex_polygons.size() + 1);
1042  output->tex_coordinates.resize(output->tex_coordinates.size() + 1);
1043  }
1044 
1045  // update and append polygon indices
1046  int oi = output->tex_polygons[index].size();
1047  output->tex_polygons[index].resize(output->tex_polygons[index].size() + (*iter)->tex_polygons[i].size());
1048  for(unsigned int j=0; j<(*iter)->tex_polygons[i].size(); ++j)
1049  {
1050  pcl::Vertices polygon = (*iter)->tex_polygons[i][j];
1051  for(unsigned int k=0; k<polygon.vertices.size(); ++k)
1052  {
1053  polygon.vertices[k] += polygonStep;
1054  }
1055  output->tex_polygons[index][oi+j] = polygon;
1056  }
1057 
1058  // append uv coordinates
1059  oi = output->tex_coordinates[index].size();
1060  output->tex_coordinates[index].resize(output->tex_coordinates[index].size() + (*iter)->tex_coordinates[i].size());
1061  for(unsigned int j=0; j<(*iter)->tex_coordinates[i].size(); ++j)
1062  {
1063  output->tex_coordinates[index][oi+j] = (*iter)->tex_coordinates[i][j];
1064  }
1065  }
1066  }
1067  }
1068  return output;
1069 }
1070 
1071 int gcd(int a, int b) {
1072  return b == 0 ? a : gcd(b, a % b);
1073 }
1074 
1075 void concatenateTextureMaterials(pcl::TextureMesh & mesh, const cv::Size & imageSize, int textureSize, int maxTextures, float & scale, std::vector<bool> * materialsKept)
1076 {
1077  UASSERT(textureSize>0 && imageSize.width>0 && imageSize.height>0);
1078  if(maxTextures < 1)
1079  {
1080  maxTextures = 1;
1081  }
1082  int materials = 0;
1083  for(unsigned int i=0; i<mesh.tex_materials.size(); ++i)
1084  {
1085  if(mesh.tex_polygons.size())
1086  {
1087  ++materials;
1088  }
1089  }
1090  if(materials)
1091  {
1092  int w = imageSize.width; // 640
1093  int h = imageSize.height; // 480
1094  int g = gcd(w,h); // 160
1095  int a = w/g; // 4=640/160
1096  int b = h/g; // 3=480/160
1097  UDEBUG("w=%d h=%d g=%d a=%d b=%d", w, h, g, a, b);
1098  int colCount = 0;
1099  int rowCount = 0;
1100  float factor = 0.1f;
1101  float epsilon = 0.001f;
1102  scale = 1.0f;
1103  while((colCount*rowCount)*maxTextures < materials || (factor == 0.1f || scale > 1.0f))
1104  {
1105  // first run try scale = 1 (no scaling)
1106  if(factor!=0.1f)
1107  {
1108  scale = float(textureSize)/float(w*b*factor);
1109  }
1110  colCount = float(textureSize)/(scale*float(w));
1111  rowCount = float(textureSize)/(scale*float(h));
1112  factor+=epsilon; // search the maximum perfect fit
1113  }
1114  int outputTextures = (materials / (colCount*rowCount)) + (materials % (colCount*rowCount) > 0?1:0);
1115  UDEBUG("materials=%d col=%d row=%d output textures=%d factor=%f scale=%f", materials, colCount, rowCount, outputTextures, factor-epsilon, scale);
1116 
1117  UASSERT(mesh.tex_coordinates.size() == mesh.tex_materials.size() && mesh.tex_polygons.size() == mesh.tex_materials.size());
1118 
1119  // prepare size
1120  std::vector<int> totalPolygons(outputTextures, 0);
1121  std::vector<int> totalCoordinates(outputTextures, 0);
1122  int count = 0;
1123  for(unsigned int i=0; i<mesh.tex_materials.size(); ++i)
1124  {
1125  if(mesh.tex_polygons[i].size())
1126  {
1127  int indexMaterial = count / (colCount*rowCount);
1128  UASSERT(indexMaterial < outputTextures);
1129 
1130  totalPolygons[indexMaterial]+=mesh.tex_polygons[i].size();
1131  totalCoordinates[indexMaterial]+=mesh.tex_coordinates[i].size();
1132 
1133  ++count;
1134  }
1135  }
1136 
1137  pcl::TextureMesh outputMesh;
1138 
1139  int pi = 0;
1140  int ci = 0;
1141  int ti=0;
1142  float scaledHeight = float(int(scale*float(h)))/float(textureSize);
1143  float scaledWidth = float(int(scale*float(w)))/float(textureSize);
1144  float lowerBorderSize = 1.0f - scaledHeight*float(rowCount);
1145  UDEBUG("scaledWidth=%f scaledHeight=%f lowerBorderSize=%f", scaledWidth, scaledHeight, lowerBorderSize);
1146  if(materialsKept)
1147  {
1148  materialsKept->resize(mesh.tex_materials.size(), false);
1149  }
1150  for(unsigned int t=0; t<mesh.tex_materials.size(); ++t)
1151  {
1152  if(mesh.tex_polygons[t].size())
1153  {
1154  int indexMaterial = ti / (colCount*rowCount);
1155  UASSERT(indexMaterial < outputTextures);
1156  if((int)outputMesh.tex_polygons.size() <= indexMaterial)
1157  {
1158  std::vector<pcl::Vertices> newPolygons(totalPolygons[indexMaterial]);
1159 #if PCL_VERSION_COMPARE(>=, 1, 8, 0)
1160  std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > newCoordinates(totalCoordinates[indexMaterial]); // UV coordinates
1161 #else
1162  std::vector<Eigen::Vector2f> newCoordinates(totalCoordinates[indexMaterial]); // UV coordinates
1163 #endif
1164  outputMesh.tex_polygons.push_back(newPolygons);
1165  outputMesh.tex_coordinates.push_back(newCoordinates);
1166 
1167  pi=0;
1168  ci=0;
1169  }
1170 
1171  int row = (ti/colCount) % rowCount;
1172  int col = ti%colCount;
1173  float offsetU = scaledWidth * float(col);
1174  float offsetV = scaledHeight * float((rowCount - 1) - row) + lowerBorderSize;
1175  // Texture coords have lower-left origin
1176 
1177  for(unsigned int i=0; i<mesh.tex_polygons[t].size(); ++i)
1178  {
1179  UASSERT(pi < (int)outputMesh.tex_polygons[indexMaterial].size());
1180  outputMesh.tex_polygons[indexMaterial][pi++] = mesh.tex_polygons[t].at(i);
1181  }
1182 
1183  for(unsigned int i=0; i<mesh.tex_coordinates[t].size(); ++i)
1184  {
1185  const Eigen::Vector2f & v = mesh.tex_coordinates[t].at(i);
1186  if(v[0] >= 0 && v[1] >=0)
1187  {
1188  outputMesh.tex_coordinates[indexMaterial][ci][0] = v[0]*scaledWidth + offsetU;
1189  outputMesh.tex_coordinates[indexMaterial][ci][1] = v[1]*scaledHeight + offsetV;
1190  }
1191  else
1192  {
1193  outputMesh.tex_coordinates[indexMaterial][ci] = v;
1194  }
1195  ++ci;
1196  }
1197  ++ti;
1198  if(materialsKept)
1199  {
1200  materialsKept->at(t) = true;
1201  }
1202  }
1203  }
1204  pcl::TexMaterial m = mesh.tex_materials.front();
1205  mesh.tex_materials.clear();
1206  for(int i=0; i<outputTextures; ++i)
1207  {
1208  m.tex_file = "texture";
1209  m.tex_name = "material";
1210  if(outputTextures > 1)
1211  {
1212  m.tex_file += uNumber2Str(i);
1213  m.tex_name += uNumber2Str(i);
1214  }
1215 
1216  mesh.tex_materials.push_back(m);
1217  }
1218  mesh.tex_coordinates = outputMesh.tex_coordinates;
1219  mesh.tex_polygons = outputMesh.tex_polygons;
1220  }
1221 }
1222 
1223 std::vector<std::vector<RTABMAP_PCL_INDEX> > convertPolygonsFromPCL(const std::vector<pcl::Vertices> & polygons)
1224 {
1225  std::vector<std::vector<RTABMAP_PCL_INDEX> > polygonsOut(polygons.size());
1226  for(unsigned int p=0; p<polygons.size(); ++p)
1227  {
1228  polygonsOut[p] = polygons[p].vertices;
1229  }
1230  return polygonsOut;
1231 }
1232 std::vector<std::vector<std::vector<RTABMAP_PCL_INDEX> > > convertPolygonsFromPCL(const std::vector<std::vector<pcl::Vertices> > & tex_polygons)
1233 {
1234  std::vector<std::vector<std::vector<RTABMAP_PCL_INDEX> > > polygonsOut(tex_polygons.size());
1235  for(unsigned int t=0; t<tex_polygons.size(); ++t)
1236  {
1237  polygonsOut[t].resize(tex_polygons[t].size());
1238  for(unsigned int p=0; p<tex_polygons[t].size(); ++p)
1239  {
1240  polygonsOut[t][p] = tex_polygons[t][p].vertices;
1241  }
1242  }
1243  return polygonsOut;
1244 }
1245 std::vector<pcl::Vertices> convertPolygonsToPCL(const std::vector<std::vector<RTABMAP_PCL_INDEX> > & polygons)
1246 {
1247  std::vector<pcl::Vertices> polygonsOut(polygons.size());
1248  for(unsigned int p=0; p<polygons.size(); ++p)
1249  {
1250  polygonsOut[p].vertices = polygons[p];
1251  }
1252  return polygonsOut;
1253 }
1254 std::vector<std::vector<pcl::Vertices> > convertPolygonsToPCL(const std::vector<std::vector<std::vector<RTABMAP_PCL_INDEX> > > & tex_polygons)
1255 {
1256  std::vector<std::vector<pcl::Vertices> > polygonsOut(tex_polygons.size());
1257  for(unsigned int t=0; t<tex_polygons.size(); ++t)
1258  {
1259  polygonsOut[t].resize(tex_polygons[t].size());
1260  for(unsigned int p=0; p<tex_polygons[t].size(); ++p)
1261  {
1262  polygonsOut[t][p].vertices = tex_polygons[t][p];
1263  }
1264  }
1265  return polygonsOut;
1266 }
1267 
1268 pcl::TextureMesh::Ptr assembleTextureMesh(
1269  const cv::Mat & cloudMat,
1270  const std::vector<std::vector<std::vector<RTABMAP_PCL_INDEX> > > & polygons,
1271 #if PCL_VERSION_COMPARE(>=, 1, 8, 0)
1272  const std::vector<std::vector<Eigen::Vector2f, Eigen::aligned_allocator<Eigen::Vector2f> > > & texCoords,
1273 #else
1274  const std::vector<std::vector<Eigen::Vector2f> > & texCoords,
1275 #endif
1276  cv::Mat & textures,
1277  bool mergeTextures)
1278 {
1279  pcl::TextureMesh::Ptr textureMesh(new pcl::TextureMesh);
1280 
1281  if(cloudMat.channels() <= 3)
1282  {
1283  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = rtabmap::util3d::laserScanToPointCloud(LaserScan::backwardCompatibility(cloudMat));
1284  pcl::toPCLPointCloud2(*cloud, textureMesh->cloud);
1285  }
1286  else if(cloudMat.channels() == 4)
1287  {
1288  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudRGB(LaserScan::backwardCompatibility(cloudMat));
1289  pcl::toPCLPointCloud2(*cloud, textureMesh->cloud);
1290  }
1291  else if(cloudMat.channels() == 6)
1292  {
1293  pcl::PointCloud<pcl::PointNormal>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudNormal(LaserScan::backwardCompatibility(cloudMat));
1294  pcl::toPCLPointCloud2(*cloud, textureMesh->cloud);
1295  }
1296  else if(cloudMat.channels() == 7)
1297  {
1298  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudRGBNormal(LaserScan::backwardCompatibility(cloudMat));
1299  pcl::toPCLPointCloud2(*cloud, textureMesh->cloud);
1300  }
1301 
1302  if(textureMesh->cloud.data.size() && polygons.size())
1303  {
1304  textureMesh->tex_polygons.resize(polygons.size());
1305  for(unsigned int t=0; t<polygons.size(); ++t)
1306  {
1307  textureMesh->tex_polygons[t].resize(polygons[t].size());
1308  for(unsigned int p=0; p<polygons[t].size(); ++p)
1309  {
1310  textureMesh->tex_polygons[t][p].vertices = polygons[t][p];
1311  }
1312  }
1313 
1314  if(!texCoords.empty() && !textures.empty())
1315  {
1316  textureMesh->tex_coordinates = texCoords;
1317 
1318  textureMesh->tex_materials.resize (textureMesh->tex_coordinates.size());
1319  for(unsigned int i = 0 ; i < textureMesh->tex_coordinates.size() ; ++i)
1320  {
1321  pcl::TexMaterial mesh_material;
1322  mesh_material.tex_Ka.r = 0.2f;
1323  mesh_material.tex_Ka.g = 0.2f;
1324  mesh_material.tex_Ka.b = 0.2f;
1325 
1326  mesh_material.tex_Kd.r = 0.8f;
1327  mesh_material.tex_Kd.g = 0.8f;
1328  mesh_material.tex_Kd.b = 0.8f;
1329 
1330  mesh_material.tex_Ks.r = 1.0f;
1331  mesh_material.tex_Ks.g = 1.0f;
1332  mesh_material.tex_Ks.b = 1.0f;
1333 
1334  mesh_material.tex_d = 1.0f;
1335  mesh_material.tex_Ns = 75.0f;
1336  mesh_material.tex_illum = 2;
1337 
1338  std::stringstream tex_name;
1339  tex_name << "material_" << i;
1340  tex_name >> mesh_material.tex_name;
1341 
1342  mesh_material.tex_file = uFormat("%d", i);
1343 
1344  textureMesh->tex_materials[i] = mesh_material;
1345  }
1346 
1347  if(mergeTextures && textures.cols/textures.rows > 1)
1348  {
1349  UASSERT(textures.cols % textures.rows == 0 && textures.cols/textures.rows == (int)textureMesh->tex_coordinates.size());
1350  std::vector<bool> materialsKept;
1351  float scale = 0.0f;
1352  cv::Size imageSize(textures.rows, textures.rows);
1353  int imageType = textures.type();
1354  rtabmap::util3d::concatenateTextureMaterials(*textureMesh, imageSize, textures.rows, 1, scale, &materialsKept);
1355  if(scale && textureMesh->tex_materials.size() == 1)
1356  {
1357  int cols = float(textures.rows)/(scale*imageSize.width);
1358  int rows = float(textures.rows)/(scale*imageSize.height);
1359 
1360  cv::Mat mergedTextures = cv::Mat(textures.rows, textures.rows, imageType, cv::Scalar::all(255));
1361 
1362  // make a blank texture
1363  cv::Size resizedImageSize(int(imageSize.width*scale), int(imageSize.height*scale));
1364  int oi=0;
1365  for(int i=0; i<(int)materialsKept.size(); ++i)
1366  {
1367  if(materialsKept.at(i))
1368  {
1369  int u = oi%cols * resizedImageSize.width;
1370  int v = ((oi/cols) % rows ) * resizedImageSize.height;
1371  UASSERT(u < textures.rows-resizedImageSize.width);
1372  UASSERT(v < textures.rows-resizedImageSize.height);
1373 
1374  cv::Mat resizedImage;
1375  cv::resize(textures(cv::Range::all(), cv::Range(i*textures.rows, (i+1)*textures.rows)), resizedImage, resizedImageSize, 0.0f, 0.0f, cv::INTER_AREA);
1376 
1377  UASSERT(resizedImage.type() == mergedTextures.type());
1378  resizedImage.copyTo(mergedTextures(cv::Rect(u, v, resizedImage.cols, resizedImage.rows)));
1379 
1380  ++oi;
1381  }
1382  }
1383  textures = mergedTextures;
1384  }
1385  }
1386  }
1387  }
1388  return textureMesh;
1389 }
1390 
1391 pcl::PolygonMesh::Ptr assemblePolygonMesh(
1392  const cv::Mat & cloudMat,
1393  const std::vector<std::vector<RTABMAP_PCL_INDEX> > & polygons)
1394 {
1395  pcl::PolygonMesh::Ptr polygonMesh(new pcl::PolygonMesh);
1396 
1397  if(cloudMat.channels() <= 3)
1398  {
1399  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = rtabmap::util3d::laserScanToPointCloud(LaserScan::backwardCompatibility(cloudMat));
1400  pcl::toPCLPointCloud2(*cloud, polygonMesh->cloud);
1401  }
1402  else if(cloudMat.channels() == 4)
1403  {
1404  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudRGB(LaserScan::backwardCompatibility(cloudMat));
1405  pcl::toPCLPointCloud2(*cloud, polygonMesh->cloud);
1406  }
1407  else if(cloudMat.channels() == 6)
1408  {
1409  pcl::PointCloud<pcl::PointNormal>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudNormal(LaserScan::backwardCompatibility(cloudMat));
1410  pcl::toPCLPointCloud2(*cloud, polygonMesh->cloud);
1411  }
1412  else if(cloudMat.channels() == 7)
1413  {
1414  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud = rtabmap::util3d::laserScanToPointCloudRGBNormal(LaserScan::backwardCompatibility(cloudMat));
1415  pcl::toPCLPointCloud2(*cloud, polygonMesh->cloud);
1416  }
1417 
1418  if(polygonMesh->cloud.data.size() && polygons.size())
1419  {
1420  polygonMesh->polygons.resize(polygons.size());
1421  for(unsigned int p=0; p<polygons.size(); ++p)
1422  {
1423  polygonMesh->polygons[p].vertices = polygons[p];
1424  }
1425  }
1426  return polygonMesh;
1427 }
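/* Example usage (illustrative sketch; `cloudMat` and `polygons` are assumed to
 * have been produced by the corresponding export/serialization code): rebuild a
 * pcl::PolygonMesh from the cv::Mat cloud / index representation.
 *
 *   pcl::PolygonMesh::Ptr mesh = rtabmap::util3d::assemblePolygonMesh(cloudMat, polygons);
 */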
1428 
1429 double sqr(uchar v)
1430 {
1431  return double(v)*double(v);
1432 }
1433 
1434 cv::Mat mergeTextures(
1435  pcl::TextureMesh & mesh,
1436  const std::map<int, cv::Mat> & images,
1437  const std::map<int, CameraModel> & calibrations,
1438  const Memory * memory,
1439  const DBDriver * dbDriver,
1440  int textureSize,
1441  int textureCount,
1442  const std::vector<std::map<int, pcl::PointXY> > & vertexToPixels,
1443  bool gainCompensation,
1444  float gainBeta,
1445  bool gainRGB,
1446  bool blending,
1447  int blendingDecimation,
1448  int brightnessContrastRatioLow,
1449  int brightnessContrastRatioHigh,
1450  bool exposureFusion,
1451  const ProgressState * state,
1452  unsigned char blankValue,
1453  std::map<int, std::map<int, cv::Vec4d> > * gains,
1454  std::map<int, std::map<int, cv::Mat> > * blendingGains,
1455  std::pair<float, float> * contrastValues)
1456 {
1457  std::map<int, std::vector<CameraModel> > calibVectors;
1458  for(std::map<int, CameraModel>::const_iterator iter=calibrations.begin(); iter!=calibrations.end(); ++iter)
1459  {
1460  std::vector<CameraModel> m;
1461  m.push_back(iter->second);
1462  calibVectors.insert(std::make_pair(iter->first, m));
1463  }
1464  return mergeTextures(mesh,
1465  images,
1466  calibVectors,
1467  memory,
1468  dbDriver,
1469  textureSize,
1470  textureCount,
1471  vertexToPixels,
1472  gainCompensation,
1473  gainBeta,
1474  gainRGB,
1475  blending,
1476  blendingDecimation,
1477  brightnessContrastRatioLow,
1478  brightnessContrastRatioHigh,
1479  exposureFusion,
1480  state,
1481  blankValue,
1482  gains,
1483  blendingGains,
1484  contrastValues);
1485 }
1486 cv::Mat mergeTextures(
1487  pcl::TextureMesh & mesh,
1488  const std::map<int, cv::Mat> & images,
1489  const std::map<int, std::vector<CameraModel> > & calibrations,
1490  const Memory * memory,
1491  const DBDriver * dbDriver,
1492  int textureSize,
1493  int textureCount,
1494  const std::vector<std::map<int, pcl::PointXY> > & vertexToPixels,
1495  bool gainCompensation,
1496  float gainBeta,
1497  bool gainRGB,
1498  bool blending,
1499  int blendingDecimation,
1500  int brightnessContrastRatioLow,
1501  int brightnessContrastRatioHigh,
1502  bool exposureFusion,
1503  const ProgressState * state,
1504  unsigned char blankValue,
1505  std::map<int, std::map<int, cv::Vec4d> > * gainsOut,
1506  std::map<int, std::map<int, cv::Mat> > * blendingGainsOut,
1507  std::pair<float, float> * contrastValuesOut)
1508 {
1509  //get texture size, if disabled use default 1024
1510  UASSERT(textureSize%256 == 0);
1511  UDEBUG("textureSize = %d", textureSize);
1512  cv::Mat globalTextures;
1513  if(mesh.tex_materials.size() > 1)
1514  {
1515  std::vector<std::pair<int, int> > textures(mesh.tex_materials.size(), std::pair<int, int>(-1,0));
1516  cv::Size imageSize;
1517  const int imageType=CV_8UC3;
1518 
1519  UDEBUG("");
1520  for(unsigned int i=0; i<mesh.tex_materials.size(); ++i)
1521  {
1522  std::list<std::string> texFileSplit = uSplit(mesh.tex_materials[i].tex_file, '_');
1523  if(!mesh.tex_materials[i].tex_file.empty() &&
1524  mesh.tex_polygons[i].size() &&
1525  uIsInteger(texFileSplit.front(), false))
1526  {
1527  textures[i].first = uStr2Int(texFileSplit.front());
1528  if(texFileSplit.size() == 2 &&
1529  uIsInteger(texFileSplit.back(), false) )
1530  {
1531  textures[i].second = uStr2Int(texFileSplit.back());
1532  }
1533 
1534  int textureId = textures[i].first;
1535  if(imageSize.width == 0 || imageSize.height == 0)
1536  {
1537  if(images.find(textureId) != images.end() &&
1538  !images.find(textureId)->second.empty() &&
1539  calibrations.find(textureId) != calibrations.end())
1540  {
1541  const std::vector<CameraModel> & models = calibrations.find(textureId)->second;
1542  UASSERT(models.size()>=1);
1543  if( models[0].imageHeight()>0 &&
1544  models[0].imageWidth()>0)
1545  {
1546  imageSize = models[0].imageSize();
1547  }
1548  else if(images.find(textureId)!=images.end())
1549  {
1550  // backward compatibility for image size not set in CameraModel
1551  cv::Mat image = images.find(textureId)->second;
1552  if(image.rows == 1 && image.type() == CV_8UC1)
1553  {
1554  image = uncompressImage(image);
1555  }
1556  UASSERT(!image.empty());
1557  imageSize = image.size();
1558  if(models.size()>1)
1559  {
1560  imageSize.width/=models.size();
1561  }
1562  }
1563  }
1564  else if(memory)
1565  {
1566  SensorData data = memory->getNodeData(textureId, true, false, false, false);
1567  std::vector<CameraModel> models = data.cameraModels();
1568  StereoCameraModel stereoModel = data.stereoCameraModel();
1569  if(models.size()>=1 &&
1570  models[0].imageHeight()>0 &&
1571  models[0].imageWidth()>0)
1572  {
1573  imageSize = models[0].imageSize();
1574  }
1575  else if(stereoModel.left().imageHeight() > 0 &&
1576  stereoModel.left().imageWidth() > 0)
1577  {
1578  imageSize = stereoModel.left().imageSize();
1579  }
1580  else // backward compatibility for image size not set in CameraModel
1581  {
1582  cv::Mat image;
1583  data.uncompressDataConst(&image, 0);
1584  UASSERT(!image.empty());
1585  imageSize = image.size();
1586  if(data.cameraModels().size()>1)
1587  {
1588  imageSize.width/=data.cameraModels().size();
1589  }
1590  }
1591  }
1592  else if(dbDriver)
1593  {
1594  std::vector<CameraModel> models;
1595  StereoCameraModel stereoModel;
1596  dbDriver->getCalibration(textureId, models, stereoModel);
1597  if(models.size()>=1 &&
1598  models[0].imageHeight()>0 &&
1599  models[0].imageWidth()>0)
1600  {
1601  imageSize = models[0].imageSize();
1602  }
1603  else if(stereoModel.left().imageHeight() > 0 &&
1604  stereoModel.left().imageWidth() > 0)
1605  {
1606  imageSize = stereoModel.left().imageSize();
1607  }
1608  else // backward compatibility for image size not set in CameraModel
1609  {
1610  SensorData data;
1611  dbDriver->getNodeData(textureId, data, true, false, false, false);
1612  cv::Mat image;
1613  data.uncompressDataConst(&image, 0);
1614  UASSERT(!image.empty());
1615  imageSize = image.size();
1616  if(data.cameraModels().size()>1)
1617  {
1618  imageSize.width/=data.cameraModels().size();
1619  }
1620  }
1621  }
1622  }
1623  }
1624  else if(mesh.tex_polygons[i].size() && mesh.tex_materials[i].tex_file.compare("occluded")!=0)
1625  {
1626  UWARN("Failed parsing texture file name: %s", mesh.tex_materials[i].tex_file.c_str());
1627  }
1628  }
1629  UDEBUG("textures=%d imageSize=%dx%d", (int)textures.size(), imageSize.height, imageSize.width);
1630  if(textures.size() && imageSize.height>0 && imageSize.width>0)
1631  {
1632  float scale = 0.0f;
1633  UDEBUG("");
1634  std::vector<bool> materialsKept;
1635  util3d::concatenateTextureMaterials(mesh, imageSize, textureSize, textureCount, scale, &materialsKept);
1636  if(scale && mesh.tex_materials.size())
1637  {
1638  int materials = (int)mesh.tex_materials.size();
1639  int cols = float(textureSize)/(scale*imageSize.width);
1640  int rows = float(textureSize)/(scale*imageSize.height);
1641 
1642  globalTextures = cv::Mat(textureSize, materials*textureSize, imageType, cv::Scalar::all(blankValue));
1643  cv::Mat globalTextureMasks = cv::Mat(textureSize, materials*textureSize, CV_8UC1, cv::Scalar::all(0));
1644 
1645  // used for multi camera texturing, to avoid reloading same texture for sub cameras
1646  cv::Mat previousImage;
1647  int previousTextureId = 0;
1648  std::vector<CameraModel> previousCameraModels;
1649 
1650  // make a blank texture
1651  cv::Mat emptyImage(int(imageSize.height*scale), int(imageSize.width*scale), imageType, cv::Scalar::all(blankValue));
1652  cv::Mat emptyImageMask(int(imageSize.height*scale), int(imageSize.width*scale), CV_8UC1, cv::Scalar::all(255));
1653  int oi=0;
1654  std::vector<cv::Point2i> imageOrigin(textures.size());
1655  std::vector<int> newCamIndex(textures.size(), -1);
1656  for(int t=0; t<(int)textures.size(); ++t)
1657  {
1658  if(materialsKept.at(t))
1659  {
1660  int indexMaterial = oi / (cols*rows);
1661  UASSERT(indexMaterial < materials);
1662 
1663  newCamIndex[t] = oi;
1664  int u = oi%cols * emptyImage.cols;
1665  int v = ((oi/cols) % rows ) * emptyImage.rows;
1666  UASSERT_MSG(u < textureSize-emptyImage.cols, uFormat("u=%d textureSize=%d emptyImage.cols=%d", u, textureSize, emptyImage.cols).c_str());
1667  UASSERT_MSG(v < textureSize-emptyImage.rows, uFormat("v=%d textureSize=%d emptyImage.rows=%d", v, textureSize, emptyImage.rows).c_str());
1668  imageOrigin[t].x = u;
1669  imageOrigin[t].y = v;
1670  if(textures[t].first>=0)
1671  {
1672  cv::Mat image;
1673  std::vector<CameraModel> models;
1674 
1675  if(textures[t].first == previousTextureId)
1676  {
1677  image = previousImage;
1678  models = previousCameraModels;
1679  }
1680  else
1681  {
1682  if(images.find(textures[t].first) != images.end() &&
1683  !images.find(textures[t].first)->second.empty() &&
1684  calibrations.find(textures[t].first) != calibrations.end())
1685  {
1686  image = images.find(textures[t].first)->second;
1687  if(image.rows == 1 && image.type() == CV_8UC1)
1688  {
1689  image = uncompressImage(image);
1690  }
1691  models = calibrations.find(textures[t].first)->second;
1692  }
1693  else if(memory)
1694  {
1695  SensorData data = memory->getNodeData(textures[t].first, true, false, false, false);
1696  models = data.cameraModels();
1697  data.uncompressDataConst(&image, 0);
1698  }
1699  else if(dbDriver)
1700  {
1701  SensorData data;
1702  dbDriver->getNodeData(textures[t].first, data, true, false, false, false);
1703  data.uncompressDataConst(&image, 0);
1704  StereoCameraModel stereoModel;
1705  dbDriver->getCalibration(textures[t].first, models, stereoModel);
1706  }
1707 
1708  previousImage = image;
1709  previousCameraModels = models;
1710  previousTextureId = textures[t].first;
1711  }
1712 
1713  UASSERT(!image.empty());
1714 
1715  if(textures[t].second>=0)
1716  {
1717  UASSERT(textures[t].second < (int)models.size());
1718  int width = image.cols/models.size();
1719  image = image.colRange(width*textures[t].second, width*(textures[t].second+1));
1720  }
1721 
1722  cv::Mat resizedImage;
1723  cv::resize(image, resizedImage, emptyImage.size(), 0.0f, 0.0f, cv::INTER_AREA);
1724  UASSERT(resizedImage.type() == CV_8UC1 || resizedImage.type() == CV_8UC3);
1725  if(resizedImage.type() == CV_8UC1)
1726  {
1727  cv::Mat resizedImageColor;
1728  cv::cvtColor(resizedImage, resizedImageColor, CV_GRAY2BGR);
1729  resizedImage = resizedImageColor;
1730  }
1731  UASSERT(resizedImage.type() == globalTextures.type());
1732  resizedImage.copyTo(globalTextures(cv::Rect(u+indexMaterial*globalTextures.rows, v, resizedImage.cols, resizedImage.rows)));
1733  emptyImageMask.copyTo(globalTextureMasks(cv::Rect(u+indexMaterial*globalTextureMasks.rows, v, resizedImage.cols, resizedImage.rows)));
1734  }
1735  else
1736  {
1737  emptyImage.copyTo(globalTextures(cv::Rect(u+indexMaterial*globalTextures.rows, v, emptyImage.cols, emptyImage.rows)));
1738  }
1739  ++oi;
1740  }
1741 
1742  if(state)
1743  {
1744  if(state->isCanceled())
1745  {
1746  return cv::Mat();
1747  }
1748  state->callback(uFormat("Assembled texture %d/%d.", t+1, (int)textures.size()));
1749  }
1750  }
1751 
1752  UTimer timer;
1753  if(vertexToPixels.size())
1754  {
1755  //UWARN("Saving original.png", globalTexture);
1756  //cv::imwrite("original.png", globalTexture);
1757 
1758  if(gainCompensation)
1759  {
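  // Gain compensation note: the block below follows the same formulation as
  // OpenCV's stitching GainCompensator (Brown & Lowe). For every pair of
  // cameras (i,j) that see common mesh vertices, N(i,j) counts the shared
  // samples and I(i,j)/I(j,i) accumulate their mean intensities; the
  // per-camera gains g are then the solution of the linear system A*g = b
  // that minimizes
  //   sum_ij N(i,j) * ( alpha*(g_i*I(i,j) - g_j*I(j,i))^2 + beta*(1 - g_i)^2 ),
  // solved once on the gray intensities and once per B, G and R channel.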
1764  const int num_images = static_cast<int>(oi);
1765  cv::Mat_<int> N(num_images, num_images); N.setTo(0);
1766  cv::Mat_<double> I(num_images, num_images); I.setTo(0);
1767 
1768  cv::Mat_<double> IR(num_images, num_images); IR.setTo(0);
1769  cv::Mat_<double> IG(num_images, num_images); IG.setTo(0);
1770  cv::Mat_<double> IB(num_images, num_images); IB.setTo(0);
1771 
1772  // Adjust UV coordinates to globalTexture
1773  for(unsigned int p=0; p<vertexToPixels.size(); ++p)
1774  {
1775  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[p].begin(); iter!=vertexToPixels[p].end(); ++iter)
1776  {
1777  if(materialsKept.at(iter->first))
1778  {
1779  N(newCamIndex[iter->first], newCamIndex[iter->first]) +=1;
1780 
1781  std::map<int, pcl::PointXY>::const_iterator jter=iter;
1782  ++jter;
1783  int k = 1;
1784  for(; jter!=vertexToPixels[p].end(); ++jter, ++k)
1785  {
1786  if(materialsKept.at(jter->first))
1787  {
1788  int i = newCamIndex[iter->first];
1789  int j = newCamIndex[jter->first];
1790 
1791  N(i, j) += 1;
1792  N(j, i) += 1;
1793 
1794  int indexMaterial = i / (cols*rows);
1795 
1796  // uv in globalTexture
1797  int ui = iter->second.x*emptyImage.cols + imageOrigin[iter->first].x;
1798  int vi = (1.0-iter->second.y)*emptyImage.rows + imageOrigin[iter->first].y;
1799  int uj = jter->second.x*emptyImage.cols + imageOrigin[jter->first].x;
1800  int vj = (1.0-jter->second.y)*emptyImage.rows + imageOrigin[jter->first].y;
1801  cv::Vec3b * pt1 = globalTextures.ptr<cv::Vec3b>(vi,ui+indexMaterial*globalTextures.rows);
1802  cv::Vec3b * pt2 = globalTextures.ptr<cv::Vec3b>(vj,uj+indexMaterial*globalTextures.rows);
1803 
1804  I(i, j) += std::sqrt(static_cast<double>(sqr(pt1->val[0]) + sqr(pt1->val[1]) + sqr(pt1->val[2])));
1805  I(j, i) += std::sqrt(static_cast<double>(sqr(pt2->val[0]) + sqr(pt2->val[1]) + sqr(pt2->val[2])));
1806 
1807  IR(i, j) += static_cast<double>(pt1->val[2]);
1808  IR(j, i) += static_cast<double>(pt2->val[2]);
1809  IG(i, j) += static_cast<double>(pt1->val[1]);
1810  IG(j, i) += static_cast<double>(pt2->val[1]);
1811  IB(i, j) += static_cast<double>(pt1->val[0]);
1812  IB(j, i) += static_cast<double>(pt2->val[0]);
1813  }
1814  }
1815  }
1816  }
1817  }
1818 
1819  for(int i=0; i<num_images; ++i)
1820  {
1821  for(int j=i; j<num_images; ++j)
1822  {
1823  if(i == j)
1824  {
1825  if(N(i,j) == 0)
1826  {
1827  N(i,j) = 1;
1828  }
1829  }
1830  else if(N(i, j))
1831  {
1832  I(i, j) /= N(i, j);
1833  I(j, i) /= N(j, i);
1834 
1835  IR(i, j) /= N(i, j);
1836  IR(j, i) /= N(j, i);
1837  IG(i, j) /= N(i, j);
1838  IG(j, i) /= N(j, i);
1839  IB(i, j) /= N(i, j);
1840  IB(j, i) /= N(j, i);
1841  }
1842  }
1843  }
1844 
1845  cv::Mat_<double> A(num_images, num_images); A.setTo(0);
1846  cv::Mat_<double> b(num_images, 1); b.setTo(0);
1847  cv::Mat_<double> AR(num_images, num_images); AR.setTo(0);
1848  cv::Mat_<double> AG(num_images, num_images); AG.setTo(0);
1849  cv::Mat_<double> AB(num_images, num_images); AB.setTo(0);
1850  double alpha = 0.01;
1851  double beta = gainBeta;
1852  for (int i = 0; i < num_images; ++i)
1853  {
1854  for (int j = 0; j < num_images; ++j)
1855  {
1856  b(i, 0) += beta * N(i, j);
1857  A(i, i) += beta * N(i, j);
1858  AR(i, i) += beta * N(i, j);
1859  AG(i, i) += beta * N(i, j);
1860  AB(i, i) += beta * N(i, j);
1861  if (j == i) continue;
1862  A(i, i) += 2 * alpha * I(i, j) * I(i, j) * N(i, j);
1863  A(i, j) -= 2 * alpha * I(i, j) * I(j, i) * N(i, j);
1864 
1865  AR(i, i) += 2 * alpha * IR(i, j) * IR(i, j) * N(i, j);
1866  AR(i, j) -= 2 * alpha * IR(i, j) * IR(j, i) * N(i, j);
1867 
1868  AG(i, i) += 2 * alpha * IG(i, j) * IG(i, j) * N(i, j);
1869  AG(i, j) -= 2 * alpha * IG(i, j) * IG(j, i) * N(i, j);
1870 
1871  AB(i, i) += 2 * alpha * IB(i, j) * IB(i, j) * N(i, j);
1872  AB(i, j) -= 2 * alpha * IB(i, j) * IB(j, i) * N(i, j);
1873  }
1874  }
1875 
1876  cv::Mat_<double> gainsGray, gainsR, gainsG, gainsB;
1877  cv::solve(A, b, gainsGray);
1878 
1879  cv::solve(AR, b, gainsR);
1880  cv::solve(AG, b, gainsG);
1881  cv::solve(AB, b, gainsB);
1882 
1883  cv::Mat_<double> gains(gainsGray.rows, 4);
1884  gainsGray.copyTo(gains.col(0));
1885  gainsR.copyTo(gains.col(1));
1886  gainsG.copyTo(gains.col(2));
1887  gainsB.copyTo(gains.col(3));
1888 
1889  for(int t=0; t<(int)textures.size(); ++t)
1890  {
1891  //break;
1892  if(materialsKept.at(t))
1893  {
1894  int u = imageOrigin[t].x;
1895  int v = imageOrigin[t].y;
1896 
1897  UDEBUG("Gain cam%d = %f", newCamIndex[t], gainsGray(newCamIndex[t], 0));
1898 
1899  int indexMaterial = newCamIndex[t] / (cols*rows);
1900  cv::Mat roi = globalTextures(cv::Rect(u+indexMaterial*globalTextures.rows, v, emptyImage.cols, emptyImage.rows));
1901 
1902  std::vector<cv::Mat> channels;
1903  cv::split(roi, channels);
1904 
1905  // assuming BGR
1906  cv::multiply(channels[0], gains(newCamIndex[t], gainRGB?3:0), channels[0]);
1907  cv::multiply(channels[1], gains(newCamIndex[t], gainRGB?2:0), channels[1]);
1908  cv::multiply(channels[2], gains(newCamIndex[t], gainRGB?1:0), channels[2]);
1909 
1910  cv::merge(channels, roi);
1911 
1912  if(gainsOut)
1913  {
1914  cv::Vec4d g(
1915  gains(newCamIndex[t], 0),
1916  gains(newCamIndex[t], 1),
1917  gains(newCamIndex[t], 2),
1918  gains(newCamIndex[t], 3));
1919  if(gainsOut->find(textures[t].first) == gainsOut->end())
1920  {
1921  std::map<int,cv::Vec4d> value;
1922  value.insert(std::make_pair(textures[t].second, g));
1923  gainsOut->insert(std::make_pair(textures[t].first, value));
1924  }
1925  else
1926  {
1927  gainsOut->at(textures[t].first).insert(std::make_pair(textures[t].second, g));
1928  }
1929  }
1930  }
1931  }
1932  //UWARN("Saving gain.png", globalTexture);
1933  //cv::imwrite("gain.png", globalTexture);
1934  if(state) state->callback(uFormat("Gain compensation %fs", timer.ticks()));
1935  }
1936 
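  // Blending note: for each 3D vertex seen by more than one camera, a target
  // color is computed as a weighted average of the corresponding texels (the
  // weight decreases with the distance from the image center in UV space).
  // The per-texel correction ratios (clamped to [0.7, 1.3]) are stored in a
  // decimated gain map per material, which is then blurred, upscaled and
  // multiplied with the assembled texture atlas below.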
1937  if(blending)
1938  {
1939  // blending BGR
1940  int decimation = 1;
1941  if(blendingDecimation <= 0)
1942  {
1943  // determine the decimation to apply
1944  std::vector<float> edgeLengths;
1945  if(mesh.tex_coordinates.size() && mesh.tex_coordinates[0].size())
1946  {
1947  UASSERT(mesh.tex_polygons.size() && mesh.tex_polygons[0].size() && mesh.tex_polygons[0][0].vertices.size());
1948  int polygonSize = mesh.tex_polygons[0][0].vertices.size();
1949  UDEBUG("polygon size=%d", polygonSize);
1950 
1951  for(unsigned int k=0; k<mesh.tex_coordinates.size(); ++k)
1952  {
1953  for(unsigned int i=0; i<mesh.tex_coordinates[k].size(); i+=polygonSize)
1954  {
1955  for(int j=0; j<polygonSize; ++j)
1956  {
1957  const Eigen::Vector2f & uc1 = mesh.tex_coordinates[k][i + j];
1958  const Eigen::Vector2f & uc2 = mesh.tex_coordinates[k][i + (j+1)%polygonSize];
1959  Eigen::Vector2f edge = (uc1-uc2)*textureSize;
1960  edgeLengths.push_back(fabs(edge[0]));
1961  edgeLengths.push_back(fabs(edge[1]));
1962  }
1963  }
1964  }
1965  float edgeLength = 0.0f;
1966  if(edgeLengths.size())
1967  {
1968  std::sort(edgeLengths.begin(), edgeLengths.end());
1969  float m = uMean(edgeLengths.data(), edgeLengths.size());
1970  float stddev = std::sqrt(uVariance(edgeLengths.data(), edgeLengths.size(), m));
1971  edgeLength = m+stddev;
1972  decimation = 1 << 6;
1973  for(int i=1; i<=6; ++i)
1974  {
1975  if(float(1 << i) >= edgeLength)
1976  {
1977  decimation = 1 << i;
1978  break;
1979  }
1980  }
1981  }
1982 
1983  UDEBUG("edge length=%f decimation=%d", edgeLength, decimation);
1984  }
1985  }
1986  else
1987  {
1988  if(blendingDecimation > 1)
1989  {
1990  UASSERT(textureSize % blendingDecimation == 0);
1991  }
1992  decimation = blendingDecimation;
1993  UDEBUG("decimation=%d", decimation);
1994  }
1995 
1996  std::vector<cv::Mat> blendGains(materials);
1997  for(int i=0; i<materials;++i)
1998  {
1999  blendGains[i] = cv::Mat(globalTextures.rows/decimation, globalTextures.rows/decimation, CV_32FC3, cv::Scalar::all(1.0f));
2000  }
2001 
2002  for(unsigned int p=0; p<vertexToPixels.size(); ++p)
2003  {
2004  if(vertexToPixels[p].size() > 1)
2005  {
2006  std::vector<float> gainsB(vertexToPixels[p].size());
2007  std::vector<float> gainsG(vertexToPixels[p].size());
2008  std::vector<float> gainsR(vertexToPixels[p].size());
2009  float sumWeight = 0.0f;
2010  int k=0;
2011  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[p].begin(); iter!=vertexToPixels[p].end(); ++iter)
2012  {
2013  if(materialsKept.at(iter->first))
2014  {
2015  int u = iter->second.x*emptyImage.cols + imageOrigin[iter->first].x;
2016  int v = (1.0-iter->second.y)*emptyImage.rows + imageOrigin[iter->first].y;
2017  float x = iter->second.x - 0.5f;
2018  float y = iter->second.y - 0.5f;
2019  float weight = 0.7f - sqrt(x*x+y*y);
2020  if(weight<0.0f)
2021  {
2022  weight = 0.0f;
2023  }
2024  int indexMaterial = newCamIndex[iter->first] / (cols*rows);
2025  cv::Vec3b * pt = globalTextures.ptr<cv::Vec3b>(v,u+indexMaterial*globalTextures.rows);
2026  gainsB[k] = static_cast<double>(pt->val[0]) * weight;
2027  gainsG[k] = static_cast<double>(pt->val[1]) * weight;
2028  gainsR[k] = static_cast<double>(pt->val[2]) * weight;
2029  sumWeight += weight;
2030  ++k;
2031  }
2032  }
2033  gainsB.resize(k);
2034  gainsG.resize(k);
2035  gainsR.resize(k);
2036 
2037  if(sumWeight > 0)
2038  {
2039  float targetColor[3];
2040  targetColor[0] = uSum(gainsB.data(), gainsB.size()) / sumWeight;
2041  targetColor[1] = uSum(gainsG.data(), gainsG.size()) / sumWeight;
2042  targetColor[2] = uSum(gainsR.data(), gainsR.size()) / sumWeight;
2043  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[p].begin(); iter!=vertexToPixels[p].end(); ++iter)
2044  {
2045  if(materialsKept.at(iter->first))
2046  {
2047  int u = iter->second.x*emptyImage.cols + imageOrigin[iter->first].x;
2048  int v = (1.0-iter->second.y)*emptyImage.rows + imageOrigin[iter->first].y;
2049  int indexMaterial = newCamIndex[iter->first] / (cols*rows);
2050  cv::Vec3b * pt = globalTextures.ptr<cv::Vec3b>(v,u+indexMaterial*globalTextures.rows);
2051  float gB = targetColor[0]/(pt->val[0]==0?1.0f:pt->val[0]);
2052  float gG = targetColor[1]/(pt->val[1]==0?1.0f:pt->val[1]);
2053  float gR = targetColor[2]/(pt->val[2]==0?1.0f:pt->val[2]);
2054  cv::Vec3f * ptr = blendGains[indexMaterial].ptr<cv::Vec3f>(v/decimation, u/decimation);
2055  ptr->val[0] = (gB>1.3f)?1.3f:(gB<0.7f)?0.7f:gB;
2056  ptr->val[1] = (gG>1.3f)?1.3f:(gG<0.7f)?0.7f:gG;
2057  ptr->val[2] = (gR>1.3f)?1.3f:(gR<0.7f)?0.7f:gR;
2058  }
2059  }
2060  }
2061  }
2062  }
2063 
2064  if(blendingGainsOut)
2065  {
2066  for(int t=0; t<(int)textures.size(); ++t)
2067  {
2068  //break;
2069  if(materialsKept.at(t))
2070  {
2071  int u = imageOrigin[t].x/decimation;
2072  int v = imageOrigin[t].y/decimation;
2073 
2074  int indexMaterial = newCamIndex[t] / (cols*rows);
2075  cv::Mat roi = blendGains[indexMaterial](cv::Rect(u, v, emptyImage.cols/decimation, emptyImage.rows/decimation));
2076  if(blendingGainsOut->find(textures[t].first) == blendingGainsOut->end())
2077  {
2078  std::map<int,cv::Mat> value;
2079  value.insert(std::make_pair(textures[t].second, roi.clone()));
2080  blendingGainsOut->insert(std::make_pair(textures[t].first, value));
2081  }
2082  else
2083  {
2084  blendingGainsOut->at(textures[t].first).insert(std::make_pair(textures[t].second, roi.clone()));
2085  }
2086  }
2087  }
2088  }
2089 
2090  for(int i=0; i<materials; ++i)
2091  {
2092  /*std::vector<cv::Mat> channels;
2093  cv::split(blendGains, channels);
2094  cv::Mat img;
2095  channels[0].convertTo(img,CV_8U,128.0,0);
2096  cv::imwrite("blendSmallB.png", img);
2097  channels[1].convertTo(img,CV_8U,128.0,0);
2098  cv::imwrite("blendSmallG.png", img);
2099  channels[2].convertTo(img,CV_8U,128.0,0);
2100  cv::imwrite("blendSmallR.png", img);*/
2101 
2102  cv::Mat globalTexturesROI = globalTextures(cv::Range::all(), cv::Range(i*globalTextures.rows, (i+1)*globalTextures.rows));
2103  cv::Mat dst;
2104  cv::blur(blendGains[i], dst, cv::Size(3,3));
2105  cv::resize(dst, blendGains[i], globalTexturesROI.size(), 0, 0, cv::INTER_LINEAR);
2106 
2107  /*cv::split(blendGains, channels);
2108  channels[0].convertTo(img,CV_8U,128.0,0);
2109  cv::imwrite("blendFullB.png", img);
2110  channels[1].convertTo(img,CV_8U,128.0,0);
2111  cv::imwrite("blendFullG.png", img);
2112  channels[2].convertTo(img,CV_8U,128.0,0);
2113  cv::imwrite("blendFullR.png", img);*/
2114 
2115  cv::multiply(globalTexturesROI, blendGains[i], globalTexturesROI, 1.0, CV_8UC3);
2116 
2117  //UWARN("Saving blending.png", globalTexture);
2118  //cv::imwrite("blending.png", globalTexture);
2119  }
2120 
2121  if(state) state->callback(uFormat("Blending (decimation=%d) %fs", decimation, timer.ticks()));
2122  }
2123  }
2124 
2125  if(brightnessContrastRatioLow > 0 || brightnessContrastRatioHigh > 0)
2126  {
2127  if(exposureFusion)
2128  {
2129  std::vector<cv::Mat> images;
2130  images.push_back(globalTextures);
2131  if (brightnessContrastRatioLow > 0)
2132  {
2133  images.push_back(util2d::brightnessAndContrastAuto(
2134  globalTextures,
2135  globalTextureMasks,
2136  (float)brightnessContrastRatioLow,
2137  0.0f));
2138  }
2139  if (brightnessContrastRatioHigh > 0)
2140  {
2141  images.push_back(util2d::brightnessAndContrastAuto(
2142  globalTextures,
2143  globalTextureMasks,
2144  0.0f,
2145  (float)brightnessContrastRatioHigh));
2146  }
2147 
2148  globalTextures = util2d::exposureFusion(images);
2149  }
2150  else
2151  {
2152  float alpha, beta;
2153  globalTextures = util2d::brightnessAndContrastAuto(
2154  globalTextures,
2155  globalTextureMasks,
2156  (float)brightnessContrastRatioLow,
2157  (float)brightnessContrastRatioHigh,
2158  &alpha,
2159  &beta);
2160  if(contrastValuesOut)
2161  {
2162  contrastValuesOut->first = alpha;
2163  contrastValuesOut->second = beta;
2164  }
2165  }
2166  if(state) state->callback(uFormat("Brightness and contrast auto %fs", timer.ticks()));
2167  }
2168  }
2169  }
2170  }
2171  UDEBUG("globalTextures=%d", globalTextures.cols?globalTextures.cols / globalTextures.rows:0);
2172  return globalTextures;
2173 }
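/*
 * Note on the texture-merging function above (util3d::mergeTextures()): the
 * returned cv::Mat packs "materials" square atlases of textureSize x textureSize
 * pixels side by side; each kept camera image is rescaled and copied at its
 * imageOrigin offset, then the whole atlas is optionally gain-compensated,
 * blended and brightness/contrast adjusted as described in the comments above.
 */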
2174 
2175 void fixTextureMeshForVisualization(pcl::TextureMesh & textureMesh)
2176 {
2177  // VTK issue:
2178  // tex_coordinates should be linked to points, not
2179  // polygon vertices. Points linked to multiple different TCoords (different textures) should
2180  // be duplicated.
2181  for (unsigned int t = 0; t < textureMesh.tex_coordinates.size(); ++t)
2182  {
2183  if(textureMesh.tex_polygons[t].size())
2184  {
2185  pcl::PointCloud<pcl::PointXYZ>::Ptr originalCloud(new pcl::PointCloud<pcl::PointXYZ>);
2186  pcl::fromPCLPointCloud2(textureMesh.cloud, *originalCloud);
2187 
2188  // make a cloud with as many points as there are polygon vertices
2189  unsigned int nPoints = textureMesh.tex_coordinates[t].size();
2190  UASSERT(nPoints == textureMesh.tex_polygons[t].size()*textureMesh.tex_polygons[t][0].vertices.size()); // assuming polygon size is constant!
2191 
2192  pcl::PointCloud<pcl::PointXYZ>::Ptr newCloud(new pcl::PointCloud<pcl::PointXYZ>);
2193  newCloud->resize(nPoints);
2194 
2195  unsigned int oi = 0;
2196  for (unsigned int i = 0; i < textureMesh.tex_polygons[t].size(); ++i)
2197  {
2198  pcl::Vertices & vertices = textureMesh.tex_polygons[t][i];
2199 
2200  for(unsigned int j=0; j<vertices.vertices.size(); ++j)
2201  {
2202  UASSERT(oi < newCloud->size());
2203  UASSERT_MSG((size_t)vertices.vertices[j] < originalCloud->size(), uFormat("%d vs %d", vertices.vertices[j], (int)originalCloud->size()).c_str());
2204  newCloud->at(oi) = originalCloud->at(vertices.vertices[j]);
2205  vertices.vertices[j] = oi; // new vertex index
2206  ++oi;
2207  }
2208  }
2209  pcl::toPCLPointCloud2(*newCloud, textureMesh.cloud);
2210  }
2211  }
2212 }
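/*
 * Illustrative usage sketch (variable names are examples only): a TextureMesh
 * can be fixed in place before being passed to a VTK-based viewer, e.g.:
 *
 *   util3d::fixTextureMeshForVisualization(textureMesh);
 *   pcl::visualization::PCLVisualizer viewer("mesh");
 *   viewer.addTextureMesh(textureMesh, "textured_mesh");
 */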
2213 
2214 bool multiBandTexturing(
2215  const std::string & outputOBJPath,
2216  const pcl::PCLPointCloud2 & cloud,
2217  const std::vector<pcl::Vertices> & polygons,
2218  const std::map<int, Transform> & cameraPoses,
2219  const std::vector<std::map<int, pcl::PointXY> > & vertexToPixels, // required output of util3d::createTextureMesh()
2220  const std::map<int, cv::Mat> & images, // raw or compressed, can be empty if memory or dbDriver should be used
2221  const std::map<int, std::vector<CameraModel> > & cameraModels, // Should match images
2222  const Memory * memory, // Should be set if images are not set
2223  const DBDriver * dbDriver, // Should be set if images and memory are not set
2224  int textureSize,
2225  const std::string & textureFormat,
2226  const std::map<int, std::map<int, cv::Vec4d> > & gains, // optional output of util3d::mergeTextures()
2227  const std::map<int, std::map<int, cv::Mat> > & blendingGains, // optional output of util3d::mergeTextures()
2228  const std::pair<float, float> & contrastValues, // optional output of util3d::mergeTextures()
2229  bool gainRGB)
2230 {
2231 #ifdef RTABMAP_ALICE_VISION
2232  if(ULogger::level() == ULogger::kDebug)
2233  {
2234  system::Logger::get()->setLogLevel(system::EVerboseLevel::Trace);
2235  }
2236  else if(ULogger::level() == ULogger::kInfo)
2237  {
2238  system::Logger::get()->setLogLevel(system::EVerboseLevel::Info);
2239  }
2240  else if(ULogger::level() == ULogger::kWarning)
2241  {
2242  system::Logger::get()->setLogLevel(system::EVerboseLevel::Warning);
2243  }
2244  else
2245  {
2246  system::Logger::get()->setLogLevel(system::EVerboseLevel::Error);
2247  }
2248 
2249  sfmData::SfMData sfmData;
2250  pcl::PointCloud<pcl::PointXYZRGB> cloud2;
2251  pcl::fromPCLPointCloud2(cloud, cloud2);
2252  UASSERT(vertexToPixels.size() == cloud2.size());
2253  UINFO("Input mesh: %d points %d polygons", (int)cloud2.size(), (int)polygons.size());
2254  mesh::Texturing texturing;
2255 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2256  texturing.mesh = new mesh::Mesh();
2257  texturing.mesh->pts.resize(cloud2.size());
2258  texturing.mesh->pointsVisibilities.resize(cloud2.size());
2259 #else
2260  texturing.me = new mesh::Mesh();
2261  texturing.me->pts = new StaticVector<Point3d>(cloud2.size());
2262  texturing.pointsVisibilities = new mesh::PointsVisibility();
2263  texturing.pointsVisibilities->reserve(cloud2.size());
2264 #endif
2265  texturing.texParams.textureSide = 8192;
2266  texturing.texParams.downscale = 8192/textureSize;
2267 
2268  for(size_t i=0;i<cloud2.size();++i)
2269  {
2270  pcl::PointXYZRGB pt = cloud2.at(i);
2271 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2272  texturing.mesh->pointsVisibilities[i].reserve(vertexToPixels[i].size());
2273  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[i].begin(); iter!=vertexToPixels[i].end();++iter)
2274  {
2275  texturing.mesh->pointsVisibilities[i].push_back(iter->first);
2276  }
2277  texturing.mesh->pts[i] = Point3d(pt.x, pt.y, pt.z);
2278 #else
2279  mesh::PointVisibility* pointVisibility = new mesh::PointVisibility();
2280  pointVisibility->reserve(vertexToPixels[i].size());
2281  for(std::map<int, pcl::PointXY>::const_iterator iter=vertexToPixels[i].begin(); iter!=vertexToPixels[i].end();++iter)
2282  {
2283  pointVisibility->push_back(iter->first);
2284  }
2285  texturing.pointsVisibilities->push_back(pointVisibility);
2286  (*texturing.me->pts)[i] = Point3d(pt.x, pt.y, pt.z);
2287 #endif
2288  }
2289 
2290 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2291  texturing.mesh->tris.resize(polygons.size());
2292  texturing.mesh->trisMtlIds().resize(polygons.size());
2293 #else
2294  texturing.me->tris = new StaticVector<mesh::Mesh::triangle>(polygons.size());
2295 #endif
2296  for(size_t i=0;i<polygons.size();++i)
2297  {
2298  UASSERT(polygons[i].vertices.size() == 3);
2299 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2300  texturing.mesh->trisMtlIds()[i] = -1;
2301  texturing.mesh->tris[i] = mesh::Mesh::triangle(
2302 #else
2303  (*texturing.me->tris)[i] = mesh::Mesh::triangle(
2304 #endif
2305  polygons[i].vertices[0],
2306  polygons[i].vertices[1],
2307  polygons[i].vertices[2]);
2308  }
2309  UTimer timer;
2310  std::string outputDirectory = UDirectory::getDir(outputOBJPath);
2311  std::string tmpImageDirectory = outputDirectory+"/rtabmap_tmp_textures";
2312  UDirectory::removeDir(tmpImageDirectory);
2313  UDirectory::makeDir(tmpImageDirectory);
2314  UINFO("Temporary saving images in directory \"%s\"...", tmpImageDirectory.c_str());
2315  int viewId = 0;
2316  for(std::map<int, Transform>::const_iterator iter = cameraPoses.lower_bound(1); iter!=cameraPoses.end(); ++iter)
2317  {
2318  int camId = iter->first;
2319  cv::Mat image;
2320  std::vector<CameraModel> models;
2321 
2322  if( images.find(camId) != images.end() &&
2323  !images.find(camId)->second.empty() &&
2324  cameraModels.find(camId) != cameraModels.end())
2325  {
2326  image = images.find(camId)->second;
2327  models = cameraModels.find(camId)->second;
2328  }
2329  else if(memory)
2330  {
2331  SensorData data = memory->getNodeData(camId, true, false, false, false);
2332  models = data.cameraModels();
2333  if(models.empty() && data.stereoCameraModel().isValidForProjection())
2334  {
2335  models.push_back(data.stereoCameraModel().left());
2336  }
2337  if(data.imageRaw().empty())
2338  {
2339  image = data.imageCompressed();
2340  }
2341  else
2342  {
2343  image = data.imageRaw();
2344  }
2345  }
2346  else if(dbDriver)
2347  {
2348  StereoCameraModel stereoModel;
2349  dbDriver->getCalibration(camId, models, stereoModel);
2350  if(models.empty() && stereoModel.isValidForProjection())
2351  {
2352  models.push_back(stereoModel.left());
2353  }
2354 
2355  SensorData data;
2356  dbDriver->getNodeData(camId, data, true, false, false, false);
2357  if(data.imageRaw().empty())
2358  {
2359  image = data.imageCompressed();
2360  }
2361  else
2362  {
2363  image = data.imageRaw();
2364  }
2365  }
2366  if(models.empty())
2367  {
2368  UERROR("No camera models found for camera %d. Aborting multiband texturing...", iter->first);
2369  return false;
2370  }
2371  if(image.empty())
2372  {
2373  UERROR("No image found for camera %d. Aborting multiband texturing...", iter->first);
2374  return false;
2375  }
2376 
2377  if(image.rows == 1 && image.type() == CV_8UC1)
2378  {
2379  image = uncompressImage(image);
2380  }
2381  else
2382  {
2383  image = image.clone();
2384  }
2385 
2386  for(size_t i=0; i<models.size(); ++i)
2387  {
2388  const CameraModel & model = models.at(i);
2389  cv::Size imageSize = model.imageSize();
2390  if(imageSize.height == 0)
2391  {
2392  // backward compatibility
2393  imageSize.height = image.rows;
2394  imageSize.width = image.cols;
2395  }
2396  UASSERT(image.cols % imageSize.width == 0);
2397  cv::Mat imageRoi = image.colRange(i*imageSize.width, (i+1)*imageSize.width);
2398  if(gains.find(camId) != gains.end() &&
2399  gains.at(camId).find(i) != gains.at(camId).end())
2400  {
2401  const cv::Vec4d & g = gains.at(camId).at(i);
2402  std::vector<cv::Mat> channels;
2403  cv::split(imageRoi, channels);
2404 
2405  // assuming BGR
2406  cv::multiply(channels[0], g.val[gainRGB?3:0], channels[0]);
2407  cv::multiply(channels[1], g.val[gainRGB?2:0], channels[1]);
2408  cv::multiply(channels[2], g.val[gainRGB?1:0], channels[2]);
2409 
2410  cv::Mat output;
2411  cv::merge(channels, output);
2412  imageRoi = output;
2413  }
2414 
2415  if(blendingGains.find(camId) != blendingGains.end() &&
2416  blendingGains.at(camId).find(i) != blendingGains.at(camId).end())
2417  {
2418  cv::Mat g = blendingGains.at(camId).at(i);
2419  cv::Mat dst;
2420  cv::blur(g, dst, cv::Size(3,3));
2421  cv::Mat gResized;
2422  cv::resize(dst, gResized, imageRoi.size(), 0, 0, cv::INTER_LINEAR);
2423  cv::Mat output;
2424  cv::multiply(imageRoi, gResized, output, 1.0, CV_8UC3);
2425  imageRoi = output;
2426  }
2427 
2428  Transform t = iter->second * model.localTransform();
2429  Eigen::Matrix<double, 3, 4> m = (t.inverse()).toEigen3d().matrix().block<3,4>(0, 0);
2430  sfmData::CameraPose pose(geometry::Pose3(m), true);
2431  sfmData.setAbsolutePose((IndexT)viewId, pose);
2432 
2433  std::shared_ptr<camera::IntrinsicBase> camPtr = std::make_shared<camera::Pinhole>(
2434  imageSize.width, imageSize.height, model.fx(), model.cx(), model.cy());
2435  sfmData.intrinsics.insert(std::make_pair((IndexT)viewId, camPtr));
2436 
2437  std::string imagePath = tmpImageDirectory+uFormat("/%d.jpg", viewId);
2438 
2439  cv::imwrite(imagePath, imageRoi);
2440 
2441  std::shared_ptr<sfmData::View> viewPtr = std::make_shared<sfmData::View>(
2442  imagePath,
2443  (IndexT)viewId,
2444  (IndexT)viewId,
2445  (IndexT)viewId,
2446  imageSize.width,
2447  imageSize.height);
2448  sfmData.views.insert(std::make_pair((IndexT)viewId, viewPtr));
2449  ++viewId;
2450  }
2451  }
2452  UINFO("Temporary saving images in directory \"%s\"... done (%d images from %d poses). %fs", tmpImageDirectory.c_str(), viewId, (int)cameraPoses.size(), timer.ticks());
2453 
2454  mvsUtils::MultiViewParams mp(sfmData);
2455 
2456  UINFO("Unwrapping...");
2457  texturing.unwrap(mp, mesh::EUnwrapMethod::Basic);
2458  UINFO("Unwrapping done. %fs", timer.ticks());
2459 
2460  // save final obj file
2461  std::string baseName = uSplit(UFile::getName(outputOBJPath), '.').front();
2462  texturing.saveAsOBJ(outputDirectory, baseName);
2463  UINFO("Saved %s. %fs", outputOBJPath.c_str(), timer.ticks());
2464 
2465  // generate textures
2466  UINFO("Generating textures...");
2467  texturing.generateTextures(mp, outputDirectory);
2468  UINFO("Generating textures done. %fs", timer.ticks());
2469 
2470  UINFO("Cleanup temporary directory \"%s\"...", tmpImageDirectory.c_str());
2471  UDirectory dir(tmpImageDirectory);
2472  std::string fp = dir.getNextFilePath();
2473  while(!fp.empty())
2474  {
2475  UFile::erase(fp);
2476  fp = dir.getNextFilePath();
2477  }
2478  UDirectory::removeDir(tmpImageDirectory);
2479  UINFO("Cleanup temporary directory \"%s\"... done.", tmpImageDirectory.c_str());
2480 
2481  UINFO("Rename/convert textures...");
2482  dir.setPath(outputDirectory, "png");
2483  std::map<std::string, std::string> texNames; // <old, new>
2484  std::string outputFormat = textureFormat;
2485  if(outputFormat.front() == '.')
2486  {
2487  outputFormat = outputFormat.substr(1, std::string::npos);
2488  }
2489  for(std::list<std::string>::const_iterator iter=dir.getFileNames().begin(); iter!=dir.getFileNames().end(); ++iter)
2490  {
2491  // Textures are called "texture_1001.png", "texture_1002.png", ...
2492  if(uStrContains(*iter, "texture_10"))
2493  {
2494  cv::Mat img = cv::imread(outputDirectory+"/"+*iter);
2495  if(contrastValues.first != 0.0f || contrastValues.second != 0.0f)
2496  {
2497  UASSERT(img.channels() == 3);
2498  // Re-use the same contrast values for all images
2499  UINFO("Apply contrast values %f %f", contrastValues.first, contrastValues.second);
2500  img.convertTo(img, -1, contrastValues.first, contrastValues.second);
2501  }
2502  std::string newName = *iter;
2503  boost::replace_all(newName, "png", outputFormat);
2504  boost::replace_all(newName, "texture", baseName);
2505  texNames.insert(std::make_pair(*iter, newName));
2506  cv::imwrite(outputDirectory+"/"+newName, img);
2507  UFile::erase(outputDirectory+"/"+*iter);
2508  }
2509  }
2510  std::ifstream fi(outputDirectory+"/"+baseName+".mtl");
2511  std::string mtlStr((std::istreambuf_iterator<char>(fi)),
2512  std::istreambuf_iterator<char>());
2513  fi.close();
2514  UFile::erase(outputDirectory+"/"+baseName+".mtl");
2515  for(std::map<std::string, std::string>::iterator iter=texNames.begin(); iter!=texNames.end(); ++iter)
2516  {
2517  boost::replace_all(mtlStr, iter->first, iter->second);
2518  }
2519  std::ofstream fo(outputDirectory+"/"+baseName+".mtl");
2520  fo.write(mtlStr.c_str(), mtlStr.size());
2521  fo.close();
2522  UINFO("Rename/convert textures... done. %fs", timer.ticks());
2523 
2524 #if RTABMAP_ALICE_VISION_MAJOR > 2 || (RTABMAP_ALICE_VISION_MAJOR==2 && RTABMAP_ALICE_VISION_MINOR>=3)
2525  sfmData.clear();
2526 #endif
2527 
2528  return true;
2529 #else
2530  UERROR("Cannot unwrap texture mesh. RTAB-Map is not built with Alice Vision support! Returning false.");
2531  return false;
2532 #endif
2533 }
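/*
 * Illustrative usage sketch (variable names and values are examples only;
 * requires RTAB-Map built with Alice Vision support, and assumes the defaults
 * of the trailing parameters are acceptable): with a mesh created by
 * util3d::createTextureMesh() and its vertexToPixels output, an OBJ with
 * multi-band textures could be exported as:
 *
 *   bool ok = multiBandTexturing(
 *       "map/mesh.obj",
 *       textureMesh->cloud, textureMesh->tex_polygons[0],
 *       cameraPoses, vertexToPixels,
 *       std::map<int, cv::Mat>(),                     // images (db used instead)
 *       std::map<int, std::vector<CameraModel> >(),   // cameraModels
 *       0, dbDriver,                                  // memory, dbDriver
 *       8192, "jpg",
 *       std::map<int, std::map<int, cv::Vec4d> >(),   // gains
 *       std::map<int, std::map<int, cv::Mat> >(),     // blendingGains
 *       std::pair<float, float>(0.0f, 0.0f),          // contrastValues
 *       true);                                        // gainRGB
 */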
2534 
2535 LaserScan computeNormals(
2536  const LaserScan & laserScan,
2537  int searchK,
2538  float searchRadius)
2539 {
2540  if(laserScan.isEmpty())
2541  {
2542  return laserScan;
2543  }
2544 
2545  pcl::PointCloud<pcl::Normal>::Ptr normals;
2546  // convert to a compatible PCL format and compute the normals
2547  if(laserScan.hasRGB())
2548  {
2549  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = laserScanToPointCloudRGB(laserScan);
2550  if(cloud->size())
2551  {
2552  UASSERT(!laserScan.is2d());
2553  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals(cloud, searchK, searchRadius);
2554  return LaserScan(laserScanFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), LaserScan::kXYZRGBNormal, laserScan.localTransform());
2555  }
2556  }
2557  else if(laserScan.hasIntensity())
2558  {
2559  pcl::PointCloud<pcl::PointXYZI>::Ptr cloud = laserScanToPointCloudI(laserScan);
2560  if(cloud->size())
2561  {
2562  if(laserScan.is2d())
2563  {
2564  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals2D(cloud, searchK, searchRadius);
2565  if(laserScan.angleIncrement() > 0.0f)
2566  {
2567  return LaserScan(laserScan2dFromPointCloud(*cloud, *normals), LaserScan::kXYINormal, laserScan.rangeMin(), laserScan.rangeMax(), laserScan.angleMin(), laserScan.angleMax(), laserScan.angleIncrement(), laserScan.localTransform());
2568  }
2569  else
2570  {
2571  return LaserScan(laserScan2dFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), LaserScan::kXYINormal, laserScan.localTransform());
2572  }
2573 
2574  }
2575  else
2576  {
2577  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals(cloud, searchK, searchRadius);
2578  return LaserScan(laserScanFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), LaserScan::kXYZINormal, laserScan.localTransform());
2579  }
2580  }
2581  }
2582  else
2583  {
2584  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = laserScanToPointCloud(laserScan);
2585  if(cloud->size())
2586  {
2587  if(laserScan.is2d())
2588  {
2589  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals2D(cloud, searchK, searchRadius);
2590  if(laserScan.angleIncrement() > 0.0f)
2591  {
2592  return LaserScan(laserScan2dFromPointCloud(*cloud, *normals), LaserScan::kXYNormal, laserScan.rangeMin(), laserScan.rangeMax(), laserScan.angleMin(), laserScan.angleMax(), laserScan.angleIncrement(), laserScan.localTransform());
2593  }
2594  else
2595  {
2596  return LaserScan(laserScan2dFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), LaserScan::kXYNormal, laserScan.localTransform());
2597  }
2598  }
2599  else
2600  {
2601  pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals(cloud, searchK, searchRadius);
2602  return LaserScan(laserScanFromPointCloud(*cloud, *normals), laserScan.maxPoints(), laserScan.rangeMax(), LaserScan::kXYZNormal, laserScan.localTransform());
2603  }
2604  }
2605  }
2606  return LaserScan();
2607 }
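/*
 * Illustrative usage sketch (variable names and values are examples only):
 * append normals to a lidar scan using the 20 nearest neighbors:
 *
 *   LaserScan scanWithNormals = computeNormals(scan, 20, 0.0f);
 *   // the returned scan keeps the same points but switches to the
 *   // corresponding *Normal format (e.g. kXYZ -> kXYZNormal).
 */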
2608 
2609 template<typename PointT>
2610 pcl::PointCloud<pcl::Normal>::Ptr computeNormalsImpl(
2611  const typename pcl::PointCloud<PointT>::Ptr & cloud,
2612  const pcl::IndicesPtr & indices,
2613  int searchK,
2614  float searchRadius,
2615  const Eigen::Vector3f & viewPoint)
2616 {
2617  typename pcl::search::KdTree<PointT>::Ptr tree (new pcl::search::KdTree<PointT>);
2618  if(indices->size())
2619  {
2620  tree->setInputCloud(cloud, indices);
2621  }
2622  else
2623  {
2624  tree->setInputCloud (cloud);
2625  }
2626 
2627  // Normal estimation
2628 #ifdef PCL_OMP
2629  pcl::NormalEstimationOMP<PointT, pcl::Normal> n;
2630 #else
2631  pcl::NormalEstimation<PointT, pcl::Normal> n;
2632 #endif
2633  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
2634  n.setInputCloud (cloud);
2635  // Commented: Keep the output normals size the same as the input cloud
2636  //if(indices->size())
2637  //{
2638  // n.setIndices(indices);
2639  //}
2640  n.setSearchMethod (tree);
2641  n.setKSearch (searchK);
2642  n.setRadiusSearch (searchRadius);
2643  n.setViewPoint(viewPoint[0], viewPoint[1], viewPoint[2]);
2644  n.compute (*normals);
2645 
2646  return normals;
2647 }
2648 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2649  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
2650  int searchK,
2651  float searchRadius,
2652  const Eigen::Vector3f & viewPoint)
2653 {
2654  pcl::IndicesPtr indices(new std::vector<int>);
2655  return computeNormals(cloud, indices, searchK, searchRadius, viewPoint);
2656 }
2657 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2658  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
2659  int searchK,
2660  float searchRadius,
2661  const Eigen::Vector3f & viewPoint)
2662 {
2663  pcl::IndicesPtr indices(new std::vector<int>);
2664  return computeNormals(cloud, indices, searchK, searchRadius, viewPoint);
2665 }
2666 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2667  const pcl::PointCloud<pcl::PointXYZI>::Ptr & cloud,
2668  int searchK,
2669  float searchRadius,
2670  const Eigen::Vector3f & viewPoint)
2671 {
2672  pcl::IndicesPtr indices(new std::vector<int>);
2673  return computeNormals(cloud, indices, searchK, searchRadius, viewPoint);
2674 }
2675 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2676  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
2677  const pcl::IndicesPtr & indices,
2678  int searchK,
2679  float searchRadius,
2680  const Eigen::Vector3f & viewPoint)
2681 {
2682  return computeNormalsImpl<pcl::PointXYZ>(cloud, indices, searchK, searchRadius, viewPoint);
2683 }
2684 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2685  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
2686  const pcl::IndicesPtr & indices,
2687  int searchK,
2688  float searchRadius,
2689  const Eigen::Vector3f & viewPoint)
2690 {
2691  return computeNormalsImpl<pcl::PointXYZRGB>(cloud, indices, searchK, searchRadius, viewPoint);
2692 }
2693 pcl::PointCloud<pcl::Normal>::Ptr computeNormals(
2694  const pcl::PointCloud<pcl::PointXYZI>::Ptr & cloud,
2695  const pcl::IndicesPtr & indices,
2696  int searchK,
2697  float searchRadius,
2698  const Eigen::Vector3f & viewPoint)
2699 {
2700  return computeNormalsImpl<pcl::PointXYZI>(cloud, indices, searchK, searchRadius, viewPoint);
2701 }
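/*
 * Illustrative usage sketch (variable names and values are examples only;
 * pcl::concatenateFields() needs <pcl/common/io.h>): compute normals for a
 * full RGB cloud and merge them into a single point+normal cloud:
 *
 *   pcl::IndicesPtr indices(new std::vector<int>);
 *   pcl::PointCloud<pcl::Normal>::Ptr normals =
 *       computeNormals(cloud, indices, 20, 0.0f, Eigen::Vector3f(0, 0, 0));
 *   pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloudWithNormals(
 *       new pcl::PointCloud<pcl::PointXYZRGBNormal>);
 *   pcl::concatenateFields(*cloud, *normals, *cloudWithNormals);
 */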
2702 
2703 template<typename PointT>
2704 pcl::PointCloud<pcl::Normal>::Ptr computeNormals2DImpl(
2705  const typename pcl::PointCloud<PointT>::Ptr & cloud,
2706  int searchK,
2707  float searchRadius,
2708  const Eigen::Vector3f & viewPoint)
2709 {
2710  UASSERT(searchK>0 || searchRadius>0.0f);
2711  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
2712 
2713  typename pcl::search::KdTree<PointT>::Ptr tree (new pcl::search::KdTree<PointT>);
2714  tree->setInputCloud (cloud);
2715 
2716  normals->resize(cloud->size());
2717 
2718  float bad_point = std::numeric_limits<float>::quiet_NaN ();
2719 
2720  // assuming that points are ordered
2721  for(unsigned int i=0; i<cloud->size(); ++i)
2722  {
2723  const PointT & pt = cloud->at(i);
2724  std::vector<Eigen::Vector3f> neighborNormals;
2725  Eigen::Vector3f direction;
2726  direction[0] = viewPoint[0] - pt.x;
2727  direction[1] = viewPoint[1] - pt.y;
2728  direction[2] = viewPoint[2] - pt.z;
2729 
2730  std::vector<int> k_indices;
2731  std::vector<float> k_sqr_distances;
2732  if(searchRadius>0.0f)
2733  {
2734  tree->radiusSearch(cloud->at(i), searchRadius, k_indices, k_sqr_distances, searchK);
2735  }
2736  else
2737  {
2738  tree->nearestKSearch(cloud->at(i), searchK, k_indices, k_sqr_distances);
2739  }
2740 
2741  for(unsigned int j=0; j<k_indices.size(); ++j)
2742  {
2743  if(k_indices.at(j) != (int)i)
2744  {
2745  const PointT & pt2 = cloud->at(k_indices.at(j));
2746  Eigen::Vector3f v(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
2747  Eigen::Vector3f up = v.cross(direction);
2748  Eigen::Vector3f n = up.cross(v);
2749  n.normalize();
2750  neighborNormals.push_back(n);
2751  }
2752  }
2753 
2754  if(neighborNormals.empty())
2755  {
2756  normals->at(i).normal_x = bad_point;
2757  normals->at(i).normal_y = bad_point;
2758  normals->at(i).normal_z = bad_point;
2759  }
2760  else
2761  {
2762  Eigen::Vector3f meanNormal(0,0,0);
2763  for(unsigned int j=0; j<neighborNormals.size(); ++j)
2764  {
2765  meanNormal+=neighborNormals[j];
2766  }
2767  meanNormal /= (float)neighborNormals.size();
2768  meanNormal.normalize();
2769  normals->at(i).normal_x = meanNormal[0];
2770  normals->at(i).normal_y = meanNormal[1];
2771  normals->at(i).normal_z = meanNormal[2];
2772  }
2773  }
2774 
2775  return normals;
2776 }
2777 pcl::PointCloud<pcl::Normal>::Ptr computeNormals2D(
2778  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
2779  int searchK,
2780  float searchRadius,
2781  const Eigen::Vector3f & viewPoint)
2782 {
2783  return computeNormals2DImpl<pcl::PointXYZ>(cloud, searchK, searchRadius, viewPoint);
2784 }
2785 pcl::PointCloud<pcl::Normal>::Ptr computeNormals2D(
2786  const pcl::PointCloud<pcl::PointXYZI>::Ptr & cloud,
2787  int searchK,
2788  float searchRadius,
2789  const Eigen::Vector3f & viewPoint)
2790 {
2791  return computeNormals2DImpl<pcl::PointXYZI>(cloud, searchK, searchRadius, viewPoint);
2792 }
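/*
 * Note on the 2D variants above (and the fast organized 2D variant below):
 * each normal is estimated from the cross products between the vectors to the
 * neighboring scan points and the direction to the viewpoint, then averaged,
 * so the resulting normals stay in the scan plane and are oriented toward the
 * sensor.
 */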
2793 
2794 template<typename PointT>
2795 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals2DImpl(
2796  const typename pcl::PointCloud<PointT>::Ptr & cloud,
2797  int searchK,
2798  float searchRadius,
2799  const Eigen::Vector3f & viewPoint)
2800 {
2801  UASSERT(searchK>0);
2802  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
2803 
2804  normals->resize(cloud->size());
2805  searchRadius *= searchRadius; // squared distance
2806 
2807  float bad_point = std::numeric_limits<float>::quiet_NaN ();
2808 
2809  // assuming that points are ordered
2810  for(int i=0; i<(int)cloud->size(); ++i)
2811  {
2812  int li = i-searchK;
2813  if(li<0)
2814  {
2815  li=0;
2816  }
2817  int hi = i+searchK;
2818  if(hi>=(int)cloud->size())
2819  {
2820  hi=(int)cloud->size()-1;
2821  }
2822 
2823  // get previous points that are not too far from the current one
2824  const PointT & pt = cloud->at(i);
2825  std::vector<Eigen::Vector3f> neighborNormals;
2826  Eigen::Vector3f direction;
2827  direction[0] = viewPoint[0] - cloud->at(i).x;
2828  direction[1] = viewPoint[1] - cloud->at(i).y;
2829  direction[2] = viewPoint[2] - cloud->at(i).z;
2830  for(int j=i-1; j>=li; --j)
2831  {
2832  const PointT & pt2 = cloud->at(j);
2833  Eigen::Vector3f vd(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
2834  if(searchRadius<=0.0f || (vd[0]*vd[0] + vd[1]*vd[1] + vd[2]*vd[2]) < searchRadius)
2835  {
2836  Eigen::Vector3f v(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
2837  Eigen::Vector3f up = v.cross(direction);
2838  Eigen::Vector3f n = up.cross(v);
2839  n.normalize();
2840  neighborNormals.push_back(n);
2841  }
2842  else
2843  {
2844  break;
2845  }
2846  }
2847  for(int j=i+1; j<=hi; ++j)
2848  {
2849  const PointT & pt2 = cloud->at(j);
2850  Eigen::Vector3f vd(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
2851  if(searchRadius<=0.0f || (vd[0]*vd[0] + vd[1]*vd[1] + vd[2]*vd[2]) < searchRadius)
2852  {
2853  Eigen::Vector3f v(pt2.x-pt.x, pt2.y - pt.y, pt2.z - pt.z);
2854  Eigen::Vector3f up = v[2]==0.0f?Eigen::Vector3f(0,0,1):v.cross(direction);
2855  Eigen::Vector3f n = up.cross(v);
2856  n.normalize();
2857  neighborNormals.push_back(n);
2858  }
2859  else
2860  {
2861  break;
2862  }
2863  }
2864 
2865  if(neighborNormals.empty())
2866  {
2867  normals->at(i).normal_x = bad_point;
2868  normals->at(i).normal_y = bad_point;
2869  normals->at(i).normal_z = bad_point;
2870  }
2871  else
2872  {
2873  Eigen::Vector3f meanNormal(0,0,0);
2874  for(unsigned int j=0; j<neighborNormals.size(); ++j)
2875  {
2876  meanNormal+=neighborNormals[j];
2877  }
2878  meanNormal /= (float)neighborNormals.size();
2879  meanNormal.normalize();
2880  normals->at(i).normal_x = meanNormal[0];
2881  normals->at(i).normal_y = meanNormal[1];
2882  normals->at(i).normal_z = meanNormal[2];
2883  }
2884  }
2885 
2886  return normals;
2887 }
2888 
2889 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals2D(
2890  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
2891  int searchK,
2892  float searchRadius,
2893  const Eigen::Vector3f & viewPoint)
2894 {
2895  return computeFastOrganizedNormals2DImpl<pcl::PointXYZ>(cloud, searchK, searchRadius, viewPoint);
2896 }
2897 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals2D(
2898  const pcl::PointCloud<pcl::PointXYZI>::Ptr & cloud,
2899  int searchK,
2900  float searchRadius,
2901  const Eigen::Vector3f & viewPoint)
2902 {
2903  return computeFastOrganizedNormals2DImpl<pcl::PointXYZI>(cloud, searchK, searchRadius, viewPoint);
2904 }
2905 
2906 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals(
2907  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
2908  float maxDepthChangeFactor,
2909  float normalSmoothingSize,
2910  const Eigen::Vector3f & viewPoint)
2911 {
2912  pcl::IndicesPtr indices(new std::vector<int>);
2913  return computeFastOrganizedNormals(cloud, indices, maxDepthChangeFactor, normalSmoothingSize, viewPoint);
2914 }
2915 pcl::PointCloud<pcl::Normal>::Ptr computeFastOrganizedNormals(
2916  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
2917  const pcl::IndicesPtr & indices,
2918  float maxDepthChangeFactor,
2919  float normalSmoothingSize,
2920  const Eigen::Vector3f & viewPoint)
2921 {
2922  UASSERT(cloud->isOrganized());
2923 
2924  pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB>);
2925  if(indices->size())
2926  {
2927  tree->setInputCloud(cloud, indices);
2928  }
2929  else
2930  {
2931  tree->setInputCloud (cloud);
2932  }
2933 
2934  // Normal estimation
2935  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
2936  pcl::IntegralImageNormalEstimation<pcl::PointXYZRGB, pcl::Normal> ne;
2937  ne.setNormalEstimationMethod (ne.AVERAGE_3D_GRADIENT);
2938  ne.setMaxDepthChangeFactor(maxDepthChangeFactor);
2939  ne.setNormalSmoothingSize(normalSmoothingSize);
2940  ne.setBorderPolicy(ne.BORDER_POLICY_MIRROR);
2941  ne.setInputCloud(cloud);
2942  // Commented: Keep the output normals size the same as the input cloud
2943  //if(indices->size())
2944  //{
2945  // ne.setIndices(indices);
2946  //}
2947  ne.setSearchMethod(tree);
2948  ne.setViewPoint(viewPoint[0], viewPoint[1], viewPoint[2]);
2949  ne.compute(*normals);
2950 
2951  return normals;
2952 }
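/*
 * Illustrative usage sketch (variable names and values are examples only):
 * fast normals for an organized RGB-D cloud (the cloud must keep its
 * width x height organization, e.g. as produced by util3d::cloudFromDepthRGB()):
 *
 *   pcl::PointCloud<pcl::Normal>::Ptr normals =
 *       computeFastOrganizedNormals(cloud, 0.02f, 10.0f, Eigen::Vector3f(0, 0, 0));
 */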
2953 
2954 float computeNormalsComplexity(
2955  const LaserScan & scan,
2956  const Transform & t,
2957  cv::Mat * pcaEigenVectors,
2958  cv::Mat * pcaEigenValues)
2959 {
2960  if(!scan.isEmpty() && (scan.hasNormals()))
2961  {
2962  //Construct a buffer used by the pca analysis
2963  int sz = static_cast<int>(scan.size()*2);
2964  bool is2d = scan.is2d();
2965  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
2966  int oi = 0;
2967  int nOffset = scan.getNormalsOffset();
2968  bool doTransform = false;
2969  Transform tn;
2970  if(!t.isIdentity() || !scan.localTransform().isIdentity())
2971  {
2972  tn = (t*scan.localTransform()).rotation();
2973  doTransform = true;
2974  }
2975  for (int i = 0; i < scan.size(); ++i)
2976  {
2977  const float * ptrScan = scan.data().ptr<float>(0, i);
2978 
2979  if(is2d)
2980  {
2981  if(uIsFinite(ptrScan[nOffset]) && uIsFinite(ptrScan[nOffset+1]))
2982  {
2983  cv::Point3f n(ptrScan[nOffset], ptrScan[nOffset+1], 0);
2984  if(doTransform)
2985  {
2986  n = util3d::transformPoint(n, tn);
2987  }
2988  float * ptr = data_normals.ptr<float>(oi++, 0);
2989  ptr[0] = n.x;
2990  ptr[1] = n.y;
2991  }
2992  }
2993  else
2994  {
2995  if(uIsFinite(ptrScan[nOffset]) && uIsFinite(ptrScan[nOffset+1]) && uIsFinite(ptrScan[nOffset+2]))
2996  {
2997  cv::Point3f n(ptrScan[nOffset], ptrScan[nOffset+1], ptrScan[nOffset+2]);
2998  if(doTransform)
2999  {
3000  n = util3d::transformPoint(n, tn);
3001  }
3002  float * ptr = data_normals.ptr<float>(oi++, 0);
3003  ptr[0] = n.x;
3004  ptr[1] = n.y;
3005  ptr[2] = n.z;
3006  }
3007  }
3008  }
3009  if(oi>1)
3010  {
3011  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3012 
3013  if(pcaEigenVectors)
3014  {
3015  *pcaEigenVectors = pca_analysis.eigenvectors;
3016  }
3017  if(pcaEigenValues)
3018  {
3019  *pcaEigenValues = pca_analysis.eigenvalues;
3020  }
3021  UASSERT((is2d && pca_analysis.eigenvalues.total()>=2) || (!is2d && pca_analysis.eigenvalues.total()>=3));
3022  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
3023  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3024  }
3025  }
3026  else if(!scan.isEmpty())
3027  {
3028  UERROR("Scan doesn't have normals!");
3029  }
3030  return 0.0f;
3031 }
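/*
 * Note on the computeNormalsComplexity() overloads (above for LaserScan,
 * below for the point cloud types): the returned value is the smallest
 * eigenvalue of a PCA over the (rotated) normals, multiplied by the dimension
 * (2 or 3) so that it roughly lies in [0, 1]. Values close to 0 indicate that
 * the normals are nearly collinear/coplanar (e.g. a flat wall or a long
 * corridor), i.e. a geometry that typically constrains ICP registration poorly.
 */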
3032 
3033 float computeNormalsComplexity(
3034  const pcl::PointCloud<pcl::PointNormal> & cloud,
3035  const Transform & t,
3036  bool is2d,
3037  cv::Mat * pcaEigenVectors,
3038  cv::Mat * pcaEigenValues)
3039 {
3040  //Construct a buffer used by the pca analysis
3041  int sz = static_cast<int>(cloud.size()*2);
3042  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
3043  int oi = 0;
3044  bool doTransform = false;
3045  Transform tn;
3046  if(!t.isIdentity() && !t.isNull())
3047  {
3048  tn = t.rotation();
3049  doTransform = true;
3050  }
3051  for (unsigned int i = 0; i < cloud.size(); ++i)
3052  {
3053  const pcl::PointNormal & pt = cloud.at(i);
3054  cv::Point3f n(pt.normal_x, pt.normal_y, pt.normal_z);
3055  if(doTransform)
3056  {
3057  n = util3d::transformPoint(n, tn);
3058  }
3059  if(uIsFinite(pt.normal_x) && uIsFinite(pt.normal_y) && uIsFinite(pt.normal_z))
3060  {
3061  float * ptr = data_normals.ptr<float>(oi++, 0);
3062  ptr[0] = n.x;
3063  ptr[1] = n.y;
3064  if(!is2d)
3065  {
3066  ptr[2] = n.z;
3067  }
3068  }
3069  }
3070  if(oi>1)
3071  {
3072  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3073 
3074  if(pcaEigenVectors)
3075  {
3076  *pcaEigenVectors = pca_analysis.eigenvectors;
3077  }
3078  if(pcaEigenValues)
3079  {
3080  *pcaEigenValues = pca_analysis.eigenvalues;
3081  }
3082 
3083  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
3084  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3085  }
3086  return 0.0f;
3087 }
3088 
3089 float computeNormalsComplexity(
3090  const pcl::PointCloud<pcl::Normal> & normals,
3091  const Transform & t,
3092  bool is2d,
3093  cv::Mat * pcaEigenVectors,
3094  cv::Mat * pcaEigenValues)
3095 {
3096  //Construct a buffer used by the pca analysis
3097  int sz = static_cast<int>(normals.size()*2);
3098  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
3099  int oi = 0;
3100  bool doTransform = false;
3101  Transform tn;
3102  if(!t.isIdentity())
3103  {
3104  tn = t.rotation();
3105  doTransform = true;
3106  }
3107  for (unsigned int i = 0; i < normals.size(); ++i)
3108  {
3109  const pcl::Normal & pt = normals.at(i);
3110  cv::Point3f n(pt.normal_x, pt.normal_y, pt.normal_z);
3111  if(doTransform)
3112  {
3113  n = util3d::transformPoint(n, tn);
3114  }
3115  if(uIsFinite(pt.normal_x) && uIsFinite(pt.normal_y) && uIsFinite(pt.normal_z))
3116  {
3117  float * ptr = data_normals.ptr<float>(oi++, 0);
3118  ptr[0] = n.x;
3119  ptr[1] = n.y;
3120  if(!is2d)
3121  {
3122  ptr[2] = n.z;
3123  }
3124  }
3125  }
3126  if(oi>1)
3127  {
3128  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3129 
3130  if(pcaEigenVectors)
3131  {
3132  *pcaEigenVectors = pca_analysis.eigenvectors;
3133  }
3134  if(pcaEigenValues)
3135  {
3136  *pcaEigenValues = pca_analysis.eigenvalues;
3137  }
3138 
3139  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
3140  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3141  }
3142  return 0.0f;
3143 }
3144 
3145 float computeNormalsComplexity(
3146  const pcl::PointCloud<pcl::PointXYZINormal> & cloud,
3147  const Transform & t,
3148  bool is2d,
3149  cv::Mat * pcaEigenVectors,
3150  cv::Mat * pcaEigenValues)
3151 {
3152  //Construct a buffer used by the pca analysis
3153  int sz = static_cast<int>(cloud.size()*2);
3154  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
3155  int oi = 0;
3156  bool doTransform = false;
3157  Transform tn;
3158  if(!t.isIdentity())
3159  {
3160  tn = t.rotation();
3161  doTransform = true;
3162  }
3163  for (unsigned int i = 0; i < cloud.size(); ++i)
3164  {
3165  const pcl::PointXYZINormal & pt = cloud.at(i);
3166  cv::Point3f n(pt.normal_x, pt.normal_y, pt.normal_z);
3167  if(doTransform)
3168  {
3169  n = util3d::transformPoint(n, tn);
3170  }
3171  if(uIsFinite(pt.normal_x) && uIsFinite(pt.normal_y) && uIsFinite(pt.normal_z))
3172  {
3173  float * ptr = data_normals.ptr<float>(oi++, 0);
3174  ptr[0] = n.x;
3175  ptr[1] = n.y;
3176  if(!is2d)
3177  {
3178  ptr[2] = n.z;
3179  }
3180  }
3181  }
3182  if(oi>1)
3183  {
3184  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3185 
3186  if(pcaEigenVectors)
3187  {
3188  *pcaEigenVectors = pca_analysis.eigenvectors;
3189  }
3190  if(pcaEigenValues)
3191  {
3192  *pcaEigenValues = pca_analysis.eigenvalues;
3193  }
3194 
3195  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
3196  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3197  }
3198  return 0.0f;
3199 }
3200 
3201 float computeNormalsComplexity(
3202  const pcl::PointCloud<pcl::PointXYZRGBNormal> & cloud,
3203  const Transform & t,
3204  bool is2d,
3205  cv::Mat * pcaEigenVectors,
3206  cv::Mat * pcaEigenValues)
3207 {
3208  //Construct a buffer used by the pca analysis
3209  int sz = static_cast<int>(cloud.size()*2);
3210  cv::Mat data_normals = cv::Mat::zeros(sz, is2d?2:3, CV_32FC1);
3211  int oi = 0;
3212  bool doTransform = false;
3213  Transform tn;
3214  if(!t.isIdentity())
3215  {
3216  tn = t.rotation();
3217  doTransform = true;
3218  }
3219  for (unsigned int i = 0; i < cloud.size(); ++i)
3220  {
3221  const pcl::PointXYZRGBNormal & pt = cloud.at(i);
3222  cv::Point3f n(pt.normal_x, pt.normal_y, pt.normal_z);
3223  if(doTransform)
3224  {
3225  n = util3d::transformPoint(n, tn);
3226  }
3227  if(uIsFinite(pt.normal_x) && uIsFinite(pt.normal_y) && uIsFinite(pt.normal_z))
3228  {
3229  float * ptr = data_normals.ptr<float>(oi++, 0);
3230  ptr[0] = n.x;
3231  ptr[1] = n.y;
3232  if(!is2d)
3233  {
3234  ptr[2] = n.z;
3235  }
3236  }
3237  }
3238  if(oi>1)
3239  {
3240  cv::PCA pca_analysis(cv::Mat(data_normals, cv::Range(0, oi*2)), cv::Mat(), CV_PCA_DATA_AS_ROW);
3241 
3242  if(pcaEigenVectors)
3243  {
3244  *pcaEigenVectors = pca_analysis.eigenvectors;
3245  }
3246  if(pcaEigenValues)
3247  {
3248  *pcaEigenValues = pca_analysis.eigenvalues;
3249  }
3250 
3251  // Get last eigen value, scale between 0 and 1: 0=low complexity, 1=high complexity
3252  return pca_analysis.eigenvalues.at<float>(0, is2d?1:2)*(is2d?2.0f:3.0f);
3253  }
3254  return 0.0f;
3255 }
3256 
3257 pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr mls(
3258  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
3259  float searchRadius,
3260  int polygonialOrder,
3261  int upsamplingMethod, // NONE, DISTINCT_CLOUD, SAMPLE_LOCAL_PLANE, RANDOM_UNIFORM_DENSITY, VOXEL_GRID_DILATION
3262  float upsamplingRadius, // SAMPLE_LOCAL_PLANE
3263  float upsamplingStep, // SAMPLE_LOCAL_PLANE
3264  int pointDensity, // RANDOM_UNIFORM_DENSITY
3265  float dilationVoxelSize, // VOXEL_GRID_DILATION
3266  int dilationIterations) // VOXEL_GRID_DILATION
3267 {
3268  pcl::IndicesPtr indices(new std::vector<int>);
3269  return mls(cloud,
3270  indices,
3271  searchRadius,
3272  polygonialOrder,
3273  upsamplingMethod,
3274  upsamplingRadius,
3275  upsamplingStep,
3276  pointDensity,
3277  dilationVoxelSize,
3278  dilationIterations);
3279 }
3280 
3281 pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr mls(
3282  const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
3283  const pcl::IndicesPtr & indices,
3284  float searchRadius,
3285  int polygonialOrder,
3286  int upsamplingMethod, // NONE, DISTINCT_CLOUD, SAMPLE_LOCAL_PLANE, RANDOM_UNIFORM_DENSITY, VOXEL_GRID_DILATION
3287  float upsamplingRadius, // SAMPLE_LOCAL_PLANE
3288  float upsamplingStep, // SAMPLE_LOCAL_PLANE
3289  int pointDensity, // RANDOM_UNIFORM_DENSITY
3290  float dilationVoxelSize, // VOXEL_GRID_DILATION
3291  int dilationIterations) // VOXEL_GRID_DILATION
3292 {
3293  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud_with_normals(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
3294  pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB>);
3295  if(indices->size())
3296  {
3297  tree->setInputCloud (cloud, indices);
3298  }
3299  else
3300  {
3301  tree->setInputCloud (cloud);
3302  }
3303 
3304  // Init object (second point type is for the normals)
3305  pcl::MovingLeastSquares<pcl::PointXYZRGB, pcl::PointXYZRGBNormal> mls;
3306 
3307  // Set parameters
3308  mls.setComputeNormals (true);
3309  if(polygonialOrder > 0)
3310  {
3311 #if PCL_VERSION_COMPARE(<, 1, 10, 0)
3312  mls.setPolynomialFit (true);
3313 #endif
3314  mls.setPolynomialOrder(polygonialOrder);
3315  }
3316  else
3317  {
3318 #if PCL_VERSION_COMPARE(<, 1, 10, 0)
3319  mls.setPolynomialFit (false);
3320 #else
3321  mls.setPolynomialOrder(1);
3322 #endif
3323  }
3324  UASSERT(upsamplingMethod >= mls.NONE &&
3325  upsamplingMethod <= mls.VOXEL_GRID_DILATION);
3326  mls.setUpsamplingMethod((pcl::MovingLeastSquares<pcl::PointXYZRGB, pcl::PointXYZRGBNormal>::UpsamplingMethod)upsamplingMethod);
3327  mls.setSearchRadius(searchRadius);
3328  mls.setUpsamplingRadius(upsamplingRadius);
3329  mls.setUpsamplingStepSize(upsamplingStep);
3330  mls.setPointDensity(pointDensity);
3331  mls.setDilationVoxelSize(dilationVoxelSize);
3332  mls.setDilationIterations(dilationIterations);
3333 
3334  // Reconstruct
3335  mls.setInputCloud (cloud);
3336  if(indices->size())
3337  {
3338  mls.setIndices(indices);
3339  }
3340  mls.setSearchMethod (tree);
3341  mls.process (*cloud_with_normals);
3342 
3343  // The returned normals do not seem to be normalized. FIXME: Is this a bug only in PCL 1.7.1?
3344  for(unsigned int i=0; i<cloud_with_normals->size(); ++i)
3345  {
3346  Eigen::Vector3f normal(cloud_with_normals->at(i).normal_x, cloud_with_normals->at(i).normal_y, cloud_with_normals->at(i).normal_z);
3347  normal.normalize();
3348  cloud_with_normals->at(i).normal_x = normal[0];
3349  cloud_with_normals->at(i).normal_y = normal[1];
3350  cloud_with_normals->at(i).normal_z = normal[2];
3351  }
3352 
3353  return cloud_with_normals;
3354 }
3355 
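// Example (illustration only): smoothing an RGB cloud with the mls() wrapper defined
// above. The numeric values are arbitrary, not recommended defaults, and the function
// name is hypothetical.
static pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr exampleSmoothCloud(
        const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & inputCloud)
{
    // 4 cm neighborhood, 2nd order polynomial fit, no upsampling.
    return mls(inputCloud,
            0.04f, // searchRadius
            2,     // polygonialOrder
            0,     // upsamplingMethod (NONE)
            0.0f,  // upsamplingRadius
            0.0f,  // upsamplingStep
            0,     // pointDensity
            1.0f,  // dilationVoxelSize
            0);    // dilationIterations
}
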
3356 LaserScan adjustNormalsToViewPoint(
3357  const LaserScan & scan,
3358  const Eigen::Vector3f & viewpoint,
3359  bool forceGroundNormalsUp)
3360 {
3361  return adjustNormalsToViewPoint(scan, viewpoint, forceGroundNormalsUp?0.8f:0.0f);
3362 }
3363 LaserScan adjustNormalsToViewPoint(
3364  const LaserScan & scan,
3365  const Eigen::Vector3f & viewpoint,
3366  float groundNormalsUp)
3367 {
3368  if(scan.size() && !scan.is2d() && scan.hasNormals())
3369  {
3370  int nx = scan.getNormalsOffset();
3371  int ny = nx+1;
3372  int nz = ny+1;
3373  cv::Mat output = scan.data().clone();
3374  #pragma omp parallel for
3375  for(int i=0; i<scan.size(); ++i)
3376  {
3377  float * ptr = output.ptr<float>(0, i);
3378  if(uIsFinite(ptr[nx]) && uIsFinite(ptr[ny]) && uIsFinite(ptr[nz]))
3379  {
3380  Eigen::Vector3f v = viewpoint - Eigen::Vector3f(ptr[0], ptr[1], ptr[2]);
3381  Eigen::Vector3f n(ptr[nx], ptr[ny], ptr[nz]);
3382 
3383  float result = v.dot(n);
3384  if(result < 0
3385  || (groundNormalsUp>0.0f && ptr[nz] < -groundNormalsUp && ptr[2] < viewpoint[2])) // some far velodyne rays on road can have normals toward the ground
3386  {
3387  //reverse normal
3388  ptr[nx] *= -1.0f;
3389  ptr[ny] *= -1.0f;
3390  ptr[nz] *= -1.0f;
3391  }
3392  }
3393  }
3394  if(scan.angleIncrement() > 0.0f)
3395  {
3396  return LaserScan(output, scan.format(), scan.rangeMin(), scan.rangeMax(), scan.angleMin(), scan.angleMax(), scan.angleIncrement(), scan.localTransform());
3397  }
3398  else
3399  {
3400  return LaserScan(output, scan.maxPoints(), scan.rangeMax(), scan.format(), scan.localTransform());
3401  }
3402  }
3403  return scan;
3404 }
3405 
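// Example (illustration only): the flipping rule used above and in the overloads
// below: a normal already faces the viewpoint when dot(viewpoint - p, n) >= 0,
// otherwise it is negated. Hypothetical helper:
static Eigen::Vector3f exampleOrientNormalTowardViewpoint(
        const Eigen::Vector3f & point,
        const Eigen::Vector3f & normal,
        const Eigen::Vector3f & viewpoint)
{
    Eigen::Vector3f n = normal;
    if((viewpoint - point).dot(n) < 0.0f)
    {
        n = -n; // pointing away from the viewpoint, flip it
    }
    return n;
}
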
3406 template<typename PointNormalT>
3407 void adjustNormalsToViewPointImpl(
3408  typename pcl::PointCloud<PointNormalT>::Ptr & cloud,
3409  const Eigen::Vector3f & viewpoint,
3410  float groundNormalsUp)
3411 {
3412  for(unsigned int i=0; i<cloud->size(); ++i)
3413  {
3414  pcl::PointXYZ normal(cloud->points[i].normal_x, cloud->points[i].normal_y, cloud->points[i].normal_z);
3415  if(pcl::isFinite(normal))
3416  {
3417  Eigen::Vector3f v = viewpoint - cloud->points[i].getVector3fMap();
3418  Eigen::Vector3f n(normal.x, normal.y, normal.z);
3419 
3420  float result = v.dot(n);
3421  if(result < 0
3422  || (groundNormalsUp>0.0f && normal.z < -groundNormalsUp && cloud->points[i].z < viewpoint[2])) // some far velodyne rays on road can have normals toward the ground
3423  {
3424  //reverse normal
3425  cloud->points[i].normal_x *= -1.0f;
3426  cloud->points[i].normal_y *= -1.0f;
3427  cloud->points[i].normal_z *= -1.0f;
3428  }
3429  }
3430  }
3431 }
3432 
3433 void adjustNormalsToViewPoint(
3434  pcl::PointCloud<pcl::PointNormal>::Ptr & cloud,
3435  const Eigen::Vector3f & viewpoint,
3436  bool forceGroundNormalsUp)
3437 {
3438  adjustNormalsToViewPoint(cloud, viewpoint, forceGroundNormalsUp?0.8f:0.0f);
3439 }
3440 void adjustNormalsToViewPoint(
3441  pcl::PointCloud<pcl::PointNormal>::Ptr & cloud,
3442  const Eigen::Vector3f & viewpoint,
3443  float groundNormalsUp)
3444 {
3445  adjustNormalsToViewPointImpl<pcl::PointNormal>(cloud, viewpoint, groundNormalsUp);
3446 }
3447 
3448 void adjustNormalsToViewPoint(
3449  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloud,
3450  const Eigen::Vector3f & viewpoint,
3451  bool forceGroundNormalsUp)
3452 {
3453  adjustNormalsToViewPoint(cloud, viewpoint, forceGroundNormalsUp?0.8f:0.0f);
3454 }
3455 void adjustNormalsToViewPoint(
3456  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloud,
3457  const Eigen::Vector3f & viewpoint,
3458  float groundNormalsUp)
3459 {
3460  adjustNormalsToViewPointImpl<pcl::PointXYZRGBNormal>(cloud, viewpoint, groundNormalsUp);
3461 }
3462 
3463 void adjustNormalsToViewPoint(
3464  pcl::PointCloud<pcl::PointXYZINormal>::Ptr & cloud,
3465  const Eigen::Vector3f & viewpoint,
3466  bool forceGroundNormalsUp)
3467 {
3468  adjustNormalsToViewPoint(cloud, viewpoint, forceGroundNormalsUp?0.8f:0.0f);
3469 }
3470 void adjustNormalsToViewPoint(
3471  pcl::PointCloud<pcl::PointXYZINormal>::Ptr & cloud,
3472  const Eigen::Vector3f & viewpoint,
3473  float groundNormalsUp)
3474 {
3475  adjustNormalsToViewPointImpl<pcl::PointXYZINormal>(cloud, viewpoint, groundNormalsUp);
3476 }
3477 
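// Example (illustration only): re-orienting the normals of a cloud expressed in the
// sensor frame, where the viewpoint is simply the origin. The boolean overloads above
// map forceGroundNormalsUp=true to the 0.8 threshold. Hypothetical helper:
static void exampleAdjustNormals(pcl::PointCloud<pcl::PointNormal>::Ptr & cloudWithNormals)
{
    adjustNormalsToViewPoint(cloudWithNormals, Eigen::Vector3f(0, 0, 0), true);
}
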
3478 void adjustNormalsToViewPoints(
3479  const std::map<int, Transform> & poses,
3480  const pcl::PointCloud<pcl::PointXYZ>::Ptr & rawCloud,
3481  const std::vector<int> & rawCameraIndices,
3482  pcl::PointCloud<pcl::PointNormal>::Ptr & cloud)
3483 {
3484  if(poses.size() && rawCloud->size() && rawCloud->size() == rawCameraIndices.size() && cloud->size())
3485  {
3486  pcl::search::KdTree<pcl::PointXYZ>::Ptr rawTree (new pcl::search::KdTree<pcl::PointXYZ>);
3487  rawTree->setInputCloud (rawCloud);
3488 
3489  for(unsigned int i=0; i<cloud->size(); ++i)
3490  {
3491  pcl::PointXYZ normal(cloud->points[i].normal_x, cloud->points[i].normal_y, cloud->points[i].normal_z);
3492  if(pcl::isFinite(normal))
3493  {
3494  std::vector<int> indices;
3495  std::vector<float> dist;
3496  rawTree->nearestKSearch(pcl::PointXYZ(cloud->points[i].x, cloud->points[i].y, cloud->points[i].z), 1, indices, dist);
3497  UASSERT(indices.size() == 1);
3498  if(indices.size() && indices[0]>=0)
3499  {
3500  Transform p = poses.at(rawCameraIndices[indices[0]]);
3501  pcl::PointXYZ viewpoint(p.x(), p.y(), p.z());
3502  Eigen::Vector3f v = viewpoint.getVector3fMap() - cloud->points[i].getVector3fMap();
3503 
3504  Eigen::Vector3f n(normal.x, normal.y, normal.z);
3505 
3506  float result = v.dot(n);
3507  if(result < 0)
3508  {
3509  //reverse normal
3510  cloud->points[i].normal_x *= -1.0f;
3511  cloud->points[i].normal_y *= -1.0f;
3512  cloud->points[i].normal_z *= -1.0f;
3513  }
3514  }
3515  else
3516  {
3517  UWARN("Camera viewpoint not found for point %d", i);
3518  }
3519  }
3520  }
3521  }
3522 }
3523 
3524 void adjustNormalsToViewPoints(
3525  const std::map<int, Transform> & poses,
3526  const pcl::PointCloud<pcl::PointXYZ>::Ptr & rawCloud,
3527  const std::vector<int> & rawCameraIndices,
3528  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & cloud)
3529 {
3530  UASSERT(rawCloud.get() && cloud.get());
3531  UDEBUG("poses=%d, rawCloud=%d, rawCameraIndices=%d, cloud=%d", (int)poses.size(), (int)rawCloud->size(), (int)rawCameraIndices.size(), (int)cloud->size());
3532  if(poses.size() && rawCloud->size() && rawCloud->size() == rawCameraIndices.size() && cloud->size())
3533  {
3534  pcl::search::KdTree<pcl::PointXYZ>::Ptr rawTree (new pcl::search::KdTree<pcl::PointXYZ>);
3535  rawTree->setInputCloud (rawCloud);
3536  for(unsigned int i=0; i<cloud->size(); ++i)
3537  {
3538  pcl::PointXYZ normal(cloud->points[i].normal_x, cloud->points[i].normal_y, cloud->points[i].normal_z);
3539  if(pcl::isFinite(normal))
3540  {
3541  std::vector<int> indices;
3542  std::vector<float> dist;
3543  rawTree->nearestKSearch(pcl::PointXYZ(cloud->points[i].x, cloud->points[i].y, cloud->points[i].z), 1, indices, dist);
3544  if(indices.size() && indices[0]>=0)
3545  {
3546  UASSERT_MSG(indices[0]<(int)rawCameraIndices.size(), uFormat("indices[0]=%d rawCameraIndices.size()=%d", indices[0], (int)rawCameraIndices.size()).c_str());
3547  UASSERT(uContains(poses, rawCameraIndices[indices[0]]));
3548  Transform p = poses.at(rawCameraIndices[indices[0]]);
3549  pcl::PointXYZ viewpoint(p.x(), p.y(), p.z());
3550  Eigen::Vector3f v = viewpoint.getVector3fMap() - cloud->points[i].getVector3fMap();
3551 
3552  Eigen::Vector3f n(normal.x, normal.y, normal.z);
3553 
3554  float result = v.dot(n);
3555  if(result < 0)
3556  {
3557  //reverse normal
3558  cloud->points[i].normal_x *= -1.0f;
3559  cloud->points[i].normal_y *= -1.0f;
3560  cloud->points[i].normal_z *= -1.0f;
3561  }
3562  }
3563  else
3564  {
3565  UWARN("Camera viewpoint not found for point %d!", i);
3566  }
3567  }
3568  }
3569  }
3570 }
3571 
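// Example (illustration only): adjustNormalsToViewPoints() above expects one
// rawCameraIndices entry per raw point, giving the id of the pose from which that
// point was acquired; the nearest raw point then provides the viewpoint of each
// assembled point. Hypothetical call, with all containers assumed filled elsewhere:
static void exampleAdjustToViewPoints(
        const std::map<int, Transform> & poses,                   // node id -> pose
        const pcl::PointCloud<pcl::PointXYZ>::Ptr & rawCloud,     // all raw points
        const std::vector<int> & rawCameraIndices,                // node id per raw point
        pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr & assembled) // cloud to fix
{
    adjustNormalsToViewPoints(poses, rawCloud, rawCameraIndices, assembled);
}
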
3572 pcl::PolygonMesh::Ptr meshDecimation(const pcl::PolygonMesh::Ptr & mesh, float factor)
3573 {
3574  pcl::PolygonMesh::Ptr output(new pcl::PolygonMesh);
3575 #ifndef DISABLE_VTK
3576  pcl::MeshQuadricDecimationVTK mqd;
3577  mqd.setTargetReductionFactor(factor);
3578  mqd.setInputMesh(mesh);
3579  mqd.process (*output);
3580 #else
3581  UWARN("RTAB-Map is not built with the VTK module, so mesh decimation cannot be used!");
3582  *output = *mesh;
3583 #endif
3584  return output;
3585 }
3586 
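// Example (illustration only): removing roughly half of the polygons with the helper
// above; the factor is the target reduction passed to VTK (the fraction of polygons
// to remove). Hypothetical helper:
static pcl::PolygonMesh::Ptr exampleDecimateMesh(const pcl::PolygonMesh::Ptr & mesh)
{
    return meshDecimation(mesh, 0.5f);
}
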
3587 bool intersectRayTriangle(
3588  const Eigen::Vector3f & p,
3589  const Eigen::Vector3f & dir,
3590  const Eigen::Vector3f & v0,
3591  const Eigen::Vector3f & v1,
3592  const Eigen::Vector3f & v2,
3593  float & distance,
3594  Eigen::Vector3f & normal)
3595 {
3596  // get triangle edge vectors and plane normal
3597  const Eigen::Vector3f u = v1-v0;
3598  const Eigen::Vector3f v = v2-v0;
3599  normal = u.cross(v); // cross product
3600  if (normal == Eigen::Vector3f(0,0,0)) // triangle is degenerate
3601  return false; // do not deal with this case
3602 
3603  const float denominator = normal.dot(dir);
3604  if (fabs(denominator) < 10e-9) // ray is parallel to triangle plane
3605  return false;
3606 
3607  // get intersect of ray with triangle plane
3608  distance = normal.dot(v0 - p) / denominator;
3609  if (distance < 0.0) // ray goes away from triangle
3610  return false;
3611 
3612  // is I inside T?
3613  float uu, uv, vv, wu, wv, D;
3614  uu = u.dot(u);
3615  uv = u.dot(v);
3616  vv = v.dot(v);
3617  const Eigen::Vector3f w = p + dir * distance - v0;
3618  wu = w.dot(u);
3619  wv = w.dot(v);
3620  D = uv * uv - uu * vv;
3621 
3622  // get and test parametric coords
3623  float s, t;
3624  s = (uv * wv - vv * wu) / D;
3625  if (s < 0.0 || s > 1.0) // I is outside T
3626  return false;
3627  t = (uv * wu - uu * wv) / D;
3628  if (t < 0.0 || (s + t) > 1.0) // I is outside T
3629  return false;
3630 
3631  return true; // I is in T
3632 }
3633 
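// Example (illustration only): casting a vertical ray down onto a right triangle
// lying in the z=0 plane; intersectRayTriangle() above returns true with
// distance == 1 and the unnormalized plane normal (0, 0, 1). Hypothetical helper:
static bool exampleRayHitsTriangle()
{
    const Eigen::Vector3f origin(0.25f, 0.25f, 1.0f);
    const Eigen::Vector3f direction(0.0f, 0.0f, -1.0f);
    const Eigen::Vector3f v0(0.0f, 0.0f, 0.0f);
    const Eigen::Vector3f v1(1.0f, 0.0f, 0.0f);
    const Eigen::Vector3f v2(0.0f, 1.0f, 0.0f);
    float distance = 0.0f;
    Eigen::Vector3f normal;
    return intersectRayTriangle(origin, direction, v0, v1, v2, distance, normal);
}
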
3634 }
3635 
3636 }