build_map.py
Go to the documentation of this file.
1 # Code example for DataFilter taking a sequence of point clouds with
2 # their global coordinates and build a map with a fix (manageable) number of points.
3 # The example shows how to generate filters in the source code.
4 # For an example generating filters using yaml configuration, see demo_cmake/convert.cpp
5 # For an example with a registration solution, see icp.cpp
6 
7 import numpy as np
8 
9 from pypointmatcher import pointmatcher as pm, pointmatchersupport as pms
10 
# Shorthand aliases for the libpointmatcher binding namespaces.
PM = pm.PointMatcher
PMIO = pm.PointMatcherIO
DP = PM.DataPoints
# Reusable string-keyed parameter dictionary handed to the filter factory
# below; it is cleared after each filter is created.
params = pms.Parametrizable.Parameters()

# Loading the list of files
# (CSV listing the scans and their ground-truth poses; the second argument
# is the base directory the file names are resolved against).
file_info_list = PMIO.FileInfoVector("../data/carCloudList.csv", "../data/")
# Target upper bound on the number of points kept in the global map.
total_point_count = 30000

# Path of output directory (default: tests/build_map/)
# The output directory must already exist
# Leave empty to save in the current directory
output_base_directory = "tests/build_map/"

# Name of output file: file_name.{vtk,csv,ply} (default: test.vtk)
output_file_name = "test.vtk"

# Route libpointmatcher log messages to a file logger.
pms.setLogger(PM.get().LoggerRegistrar.create("FileLogger"))

# Accumulated global map, previous scan (never read below — kept for parity
# with the original example), and the scan currently being processed.
map_cloud = DP()
last_cloud = DP()
new_cloud = DP()

# Sensor pose as a 4x4 homogeneous transform; identity until the ground
# truth is read from the file list.
T = np.identity(4)
35 
# Define the processing chain: one rigid transformation plus the data
# filters applied to every incoming scan.  Parameters are passed inline as
# string-valued dictionaries (the same form used for the adaptive random
# subsampler later in the script) rather than through a shared, mutated
# Parameters object.
# For a complete description of each filter, see
# https://github.com/norlab-ulaval/libpointmatcher/blob/master/doc/Datafilters.md
transformation = PM.get().TransformationRegistrar.create("RigidTransformation")

# Drops every point closer than 1 m to the origin — an easy way to remove
# the sensor self-scanning.
remove_scanner = PM.get().DataPointsFilterRegistrar.create(
    "MinDistDataPointsFilter", {"minDist": "1.0"})

# Randomly keeps 65% of the points (i.e. removes 35%) to speed things up.
rand_subsample = PM.get().DataPointsFilterRegistrar.create(
    "RandomSamplingDataPointsFilter", {"prob": "0.65"})

# Estimates surface normals from the 10 nearest neighbours (normals only).
normal_filter = PM.get().DataPointsFilterRegistrar.create(
    "SurfaceNormalDataPointsFilter",
    {"knn": "10", "epsilon": "5", "keepNormals": "1", "keepDensities": "0"})

# Same neighbourhood computation, but keeping densities instead of normals.
density_filter = PM.get().DataPointsFilterRegistrar.create(
    "SurfaceNormalDataPointsFilter",
    {"knn": "10", "epsilon": "5", "keepDensities": "1", "keepNormals": "0"})

# Adds the sensor-to-point observation direction descriptor.
observation_direction_filter = PM.get().DataPointsFilterRegistrar.create(
    "ObservationDirectionDataPointsFilter")

# Flips normals so they point toward the sensor centre.
orien_normal_filter = PM.get().DataPointsFilterRegistrar.create(
    "OrientNormalsDataPointsFilter", {"towardCenter": "1"})

# Caps the local point density at maxDensity=30.
uniform_subsample = PM.get().DataPointsFilterRegistrar.create(
    "MaxDensityDataPointsFilter", {"maxDensity": "30"})

# Removes shadow points.
shadow_filter = PM.get().DataPointsFilterRegistrar.create(
    "ShadowDataPointsFilter")
76 
# Process each scan in order: load, clean, move into the global frame,
# merge into the map, and keep the map size under total_point_count.
for i, file_info in enumerate(file_info_list):
    print("\n-----------------------------")
    print(f"Loading {file_info.readingFileName} ", end="")

    new_cloud = DP.load(file_info.readingFileName)

    print(f"found {new_cloud.getNbPoints()} points.")

    # The ground-truth pose is mandatory: it is what places the scan in the
    # global frame (there is no registration step in this example).
    if file_info.groundTruthTransformation.shape[0] != 0:
        T = file_info.groundTruthTransformation
    else:
        print("ERROR: the field gTXX (ground truth) is required")
        # Fix: exit with a non-zero status on error — bare exit() reports
        # success (status 0) to the caller.
        raise SystemExit(1)

    # Remove the scanner
    new_cloud = remove_scanner.filter(new_cloud)

    # Accelerate the process and dissolve lines
    new_cloud = rand_subsample.filter(new_cloud)

    # Build filter to remove shadow points and down-sample
    new_cloud = normal_filter.filter(new_cloud)
    new_cloud = observation_direction_filter.filter(new_cloud)
    new_cloud = orien_normal_filter.filter(new_cloud)
    new_cloud = shadow_filter.filter(new_cloud)

    # Transform the point cloud into the global frame.
    print(f"Transformation matrix:\n{T}\n".replace("[", " ").replace("]", " "), end="")
    new_cloud = transformation.compute(new_cloud, T)

    if i == 0:
        map_cloud = new_cloud
    else:
        map_cloud.concatenate(new_cloud)

    # Control point cloud size — features has one column per point.
    prob_to_keep = total_point_count / map_cloud.features.shape[1]

    if prob_to_keep < 1:
        # Recompute densities, then cap local density before resorting to a
        # purely random pass.
        map_cloud = density_filter.filter(map_cloud)
        map_cloud = uniform_subsample.filter(map_cloud)

        prob_to_keep = total_point_count / map_cloud.features.shape[1]

        if prob_to_keep < 1:
            print(f"Randomly keep {prob_to_keep * 100}% points")

            # NOTE: this deliberately rebinds the module-level rand_subsample,
            # so subsequent iterations also use the stronger probability.
            rand_subsample = PM.get().DataPointsFilterRegistrar.create(
                "RandomSamplingDataPointsFilter",
                {"prob": f"{prob_to_keep}"})

            map_cloud = rand_subsample.filter(map_cloud)

    # Save an intermediate map after each scan.  Fix: strip the actual file
    # extension instead of assuming it is exactly 4 characters ([:-4]).
    stem = output_file_name.rsplit(".", 1)[0]
    map_cloud.save(f"{output_base_directory}{stem}_{i}.vtk")
131 
# Final pass over the whole map: recompute densities, cap the local density,
# then recompute densities once more so the saved map carries density
# descriptors that match its final point set (the order mirrors the
# original C++ example — presumably intentional; verify before changing).
map_cloud = density_filter.filter(map_cloud)
map_cloud = uniform_subsample.filter(map_cloud)
map_cloud = density_filter.filter(map_cloud)

print("\n-----------------------------"*2)
print(f"Final number of points in the map: {map_cloud.getNbPoints()}")

# Write the final map next to the per-iteration snapshots.
map_cloud.save(f"{output_base_directory + output_file_name}")
PointMatcherSupport::setLogger
void setLogger(std::shared_ptr< Logger > newLogger)
Set a new logger, protected by a mutex.
Definition: Logger.cpp:98
PointMatcher::DataPoints
A point cloud.
Definition: PointMatcher.h:207
PointMatcherIO::FileInfoVector
A vector of file info, to be used in batch processing.
Definition: IO.h:245


libpointmatcher
Author(s):
autogenerated on Mon Sep 16 2024 02:24:07