test_cluster_based_estimation.cpp
// Copyright 2023-2024 Ekumen, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <gmock/gmock.h>

#include <beluga/algorithm/cluster_based_estimation.hpp>
#include <beluga/algorithm/estimation.hpp>
#include <beluga/algorithm/spatial_hash.hpp>
#include <beluga/testing/sophus_matchers.hpp>
#include <beluga/views.hpp>
#include <range/v3/action/sort.hpp>
#include <range/v3/action/unique.hpp>
#include <range/v3/view/filter.hpp>
#include <range/v3/view/unique.hpp>
#include <sophus/se2.hpp>

namespace beluga {
namespace {

using testing::SE2Near;
using testing::Vector3Near;

using Eigen::Vector2d;
using Sophus::Matrix3d;
using Sophus::SE2d;
using Sophus::SO2d;

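// Test helper: maps each state through the given spatial hash function,
// collecting the resulting cell hashes into a vector.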
template <class Range, class Hasher>
[[nodiscard]] auto precalculate_particle_hashes(Range&& states, const Hasher& hasher) {
  return states | ranges::views::transform(hasher) | ranges::to<std::vector<std::size_t>>();
}

struct ClusterBasedEstimationDetailTesting : public ::testing::Test {
  double kSpatialHashResolution = 1.0;
  double kAngularHashResolution = Sophus::Constants<double>::pi() / 2.0;  // 90 degrees
  double kTolerance = 1e-6;

  // spatial hash function used to group particles in cells
  beluga::spatial_hash<Sophus::SE2d> spatial_hash_function{
      kSpatialHashResolution,  // x
      kSpatialHashResolution,  // y
      kAngularHashResolution   // theta
  };
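  // states that fall in the same 1.0 x 1.0 translation bin and the same
  // 90-degree orientation bin hash to the same cell (bins assumed to be
  // anchored at the origin)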

  [[nodiscard]] auto generate_test_grid_cell_data_map() const {
    constexpr auto kUpperLimit = 30.0;

    clusterizer_detail::ClusterMap<SE2d> map;
    for (double x = 0.0; x < kUpperLimit; x += 1.0) {
      const auto weight = x;
      const auto state = SE2d{SO2d{0.}, Vector2d{x, x}};
      map.emplace(spatial_hash_function(state), clusterizer_detail::ClusterCell<SE2d>{state, weight, 0, std::nullopt});
    }
    return map;
  }

  [[nodiscard]] static auto
  make_particle_multicluster_dataset(double xmin, double xmax, double ymin, double ymax, double step) {
    std::vector<std::pair<SE2d, beluga::Weight>> particles;

    const auto xwidth = xmax - xmin;
    const auto ywidth = ymax - ymin;

    // simulate particles in a grid with 4 (2x2) clusters with
    // different peak heights. The highest peak is the one located on the
    // upper-right.
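    // (the weight expression below peaks where both sine factors reach 1.0,
    // i.e. at the quarter points x in {xwidth/4, 3*xwidth/4} and
    // y in {ywidth/4, 3*ywidth/4}; the k/2 subtraction further down does not
    // move the peak locations)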

    for (double x = step / 2.0; x <= xwidth; x += step) {
      for (double y = step / 2.0; y <= ywidth; y += step) {
        // adjust the height of each of the four peaks
        const auto k = (2 * x < xwidth ? 0.0 : 1.0) + (2 * y < ywidth ? 0.0 : 2.0) + 1.0;
        auto weight = std::abs(std::sin(2.0 * M_PI * x / xwidth)) *  //
                      std::abs(std::sin(2.0 * M_PI * y / ywidth)) *  //
                      k;
        // add a field of zeros around the peaks to ease predicting the mean and
        // covariance values of the highest peaks
        weight = std::max(0.0, weight - k / 2.0);
        particles.emplace_back(SE2d{SO2d{0.}, Vector2d{x + xmin, y + ymin}}, weight);
      }
    }

    return particles;
  }
};
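
// The tests below exercise the steps of the cluster-based estimation pipeline,
// which composes roughly as follows (a sketch, not the literal implementation;
// see cluster_based_estimation.hpp):
//
//   const auto clusters = ParticleClusterizer{ParticleClusterizerParam{...}}(states, weights);
//   auto estimates = estimate_clusters(states, weights, clusters);
//   // cluster_based_estimate(states, weights) runs both steps and returns the
//   // mean and covariance of the heaviest cluster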

TEST_F(ClusterBasedEstimationDetailTesting, ParticleHashesCalculationStep) {
  const auto s00 = SE2d{SO2d{0.0}, Vector2d{0.25, 0.25}};
  const auto s01 = SE2d{SO2d{0.0}, Vector2d{3.75, 0.75}};
  const auto s10 = SE2d{SO2d{2.0}, Vector2d{0.00, 0.00}};
  const auto s20 = SE2d{SO2d{2.0}, Vector2d{2.00, 0.00}};

  const auto states = std::vector{s00, s01, s10, s20};

  const auto hashes = precalculate_particle_hashes(states, spatial_hash_function);

  const auto hash00 = spatial_hash_function(s00);
  const auto hash01 = spatial_hash_function(s01);
  const auto hash10 = spatial_hash_function(s10);
  const auto hash20 = spatial_hash_function(s20);

  ASSERT_EQ(hashes.size(), 4);
  EXPECT_EQ(hashes[0], hash00);
  EXPECT_EQ(hashes[1], hash01);
  EXPECT_EQ(hashes[2], hash10);
  EXPECT_EQ(hashes[3], hash20);
}

TEST_F(ClusterBasedEstimationDetailTesting, GridCellDataMapGenerationStep) {
  const auto s00 = SE2d{SO2d{0.0}, Vector2d{0.25, 0.25}};  // bin 1
  const auto s01 = SE2d{SO2d{0.0}, Vector2d{0.75, 0.75}};  // bin 1
  const auto s10 = SE2d{SO2d{2.0}, Vector2d{0.00, 0.00}};  // bin 2
  const auto s20 = SE2d{SO2d{2.0}, Vector2d{2.00, 0.00}};  // bin 3

  const auto particles = std::vector<std::pair<SE2d, beluga::Weight>>{
      std::make_pair(s00, 1.5),
      std::make_pair(s01, 0.5),
      std::make_pair(s10, 1.0),
      std::make_pair(s20, 1.0),
  };

  auto states = beluga::views::states(particles);
  auto weights = beluga::views::weights(particles);
  auto hashes = states | ranges::views::transform(spatial_hash_function) | ranges::to<std::vector>();

  auto test_data = clusterizer_detail::make_cluster_map(states, weights, hashes);

  const auto hash00 = spatial_hash_function(s00);
  const auto hash10 = spatial_hash_function(s10);
  const auto hash20 = spatial_hash_function(s20);

  ASSERT_EQ(test_data.size(), 3);
  ASSERT_NE(test_data.find(hash00), test_data.end());
  ASSERT_NE(test_data.find(hash10), test_data.end());
  ASSERT_NE(test_data.find(hash20), test_data.end());

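  // each cell collects the weights of all particles hashed into it
  // (1.5 + 0.5 == 2.0 for the first bin)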
  EXPECT_EQ(test_data[hash00].weight, 2.0);
  EXPECT_EQ(test_data[hash10].weight, 1.0);
  EXPECT_EQ(test_data[hash20].weight, 1.0);

  ASSERT_THAT(test_data[hash00].representative_state, SE2Near(s00.so2(), s00.translation(), kTolerance));
  ASSERT_THAT(test_data[hash10].representative_state, SE2Near(s10.so2(), s10.translation(), kTolerance));
  ASSERT_THAT(test_data[hash20].representative_state, SE2Near(s20.so2(), s20.translation(), kTolerance));

  ASSERT_FALSE(test_data[hash00].cluster_id.has_value());
  ASSERT_FALSE(test_data[hash10].cluster_id.has_value());
  ASSERT_FALSE(test_data[hash20].cluster_id.has_value());
}

TEST_F(ClusterBasedEstimationDetailTesting, MakePriorityQueue) {
  // data preparation
  auto data = generate_test_grid_cell_data_map();

  // test proper
  auto prio_queue = clusterizer_detail::make_priority_queue(data, &clusterizer_detail::ClusterCell<SE2d>::weight);
  EXPECT_EQ(prio_queue.size(), data.size());

  // from there on the priorities should be non-increasing
  auto prev_weight = prio_queue.top().priority;
  while (!prio_queue.empty()) {
    const auto top = prio_queue.top();
    EXPECT_GE(prev_weight, top.priority);
    prev_weight = top.priority;
    prio_queue.pop();
  }
}

TEST_F(ClusterBasedEstimationDetailTesting, MapGridCellsToClustersStep) {
  const double k_field_side = 36.0;
  const double k_half_field_side = 18.0;

  // create a map with four independent peaks
  std::vector<std::tuple<double, double, double>> coordinates;
  for (double x = 0.0; x < k_field_side; x += 1.0) {
    for (double y = 0.0; y < k_field_side; y += 1.0) {
      const auto weight = std::abs(std::sin(10.0 * x * M_PI / 180.0)) * std::abs(std::sin(10.0 * y * M_PI / 180.0));
      coordinates.emplace_back(x, y, weight);
    }
  }

  clusterizer_detail::ClusterMap<SE2d> map;

  for (const auto& [x, y, w] : coordinates) {
    const auto state = SE2d{SO2d{0.}, Vector2d{x, y}};
    map.emplace(spatial_hash_function(state), clusterizer_detail::ClusterCell<SE2d>{state, w, 0, std::nullopt});
  }

  const auto neighbors = [&](const auto& state) {
    // the four axis-aligned neighboring cells, one spatial hash resolution away
    static const auto kAdjacentGridCells = {
        Sophus::SE2d{SO2d{0.0}, Vector2d{+kSpatialHashResolution, 0.0}},
        Sophus::SE2d{SO2d{0.0}, Vector2d{-kSpatialHashResolution, 0.0}},
        Sophus::SE2d{SO2d{0.0}, Vector2d{0.0, +kSpatialHashResolution}},
        Sophus::SE2d{SO2d{0.0}, Vector2d{0.0, -kSpatialHashResolution}},
    };

    return kAdjacentGridCells |  //
           ranges::views::transform([&state](const Sophus::SE2d& neighbor_pose) { return state * neighbor_pose; }) |
           ranges::views::transform(spatial_hash_function);
  };
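  // assign_clusters() below gives every cell in the map a cluster id, growing
  // each cluster from higher-weight cells out through the neighboring cell
  // hashes produced by the function above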

  // test proper

  // only consider cells above the 15th weight percentile, to avoid the noisy
  // border between clusters below that threshold
  const auto weight_percentile_threshold = clusterizer_detail::calculate_percentile_threshold(
      map | ranges::views::values | ranges::views::transform(&clusterizer_detail::ClusterCell<SE2d>::weight), 0.15);

  clusterizer_detail::assign_clusters(map, neighbors);

  auto cells_above_minimum_threshold_view =
      coordinates |
      ranges::views::filter([&](const auto& c) { return std::get<2>(c) >= weight_percentile_threshold; });

  const auto right_side_cell = [&](const auto& c) { return std::get<0>(c) >= k_half_field_side; };
  const auto left_side_cell = [&](const auto& c) { return !right_side_cell(c); };
  const auto top_side_cell = [&](const auto& c) { return std::get<1>(c) >= k_half_field_side; };
  const auto bottom_side_cell = [&](const auto& c) { return !top_side_cell(c); };

  auto quadrant_1_view = cells_above_minimum_threshold_view |      //
                         ranges::views::filter(left_side_cell) |   //
                         ranges::views::filter(bottom_side_cell);
  auto quadrant_2_view = cells_above_minimum_threshold_view |      //
                         ranges::views::filter(right_side_cell) |  //
                         ranges::views::filter(bottom_side_cell);
  auto quadrant_3_view = cells_above_minimum_threshold_view |      //
                         ranges::views::filter(left_side_cell) |   //
                         ranges::views::filter(top_side_cell);
  auto quadrant_4_view = cells_above_minimum_threshold_view |      //
                         ranges::views::filter(right_side_cell) |  //
                         ranges::views::filter(top_side_cell);

  const auto coord_to_hash = [&](const auto& coords) {
    const auto& [x, y, w] = coords;
    const auto state = SE2d{SO2d{0.}, Vector2d{x, y}};
    return spatial_hash_function(state);
  };

  const auto hash_to_id = [&](const auto& hash) { return map[hash].cluster_id.value(); };

  auto quadrant_1_unique_ids = quadrant_1_view |                               //
                               ranges::views::transform(coord_to_hash) |      //
                               ranges::views::transform(hash_to_id) |         //
                               ranges::to<std::vector<std::size_t>>() |       //
                               ranges::actions::sort |                        //
                               ranges::actions::unique;
  auto quadrant_2_unique_ids = quadrant_2_view |                               //
                               ranges::views::transform(coord_to_hash) |      //
                               ranges::views::transform(hash_to_id) |         //
                               ranges::to<std::vector<std::size_t>>() |       //
                               ranges::actions::sort |                        //
                               ranges::actions::unique;
  auto quadrant_3_unique_ids = quadrant_3_view |                               //
                               ranges::views::transform(coord_to_hash) |      //
                               ranges::views::transform(hash_to_id) |         //
                               ranges::to<std::vector<std::size_t>>() |       //
                               ranges::actions::sort |                        //
                               ranges::actions::unique;
  auto quadrant_4_unique_ids = quadrant_4_view |                               //
                               ranges::views::transform(coord_to_hash) |      //
                               ranges::views::transform(hash_to_id) |         //
                               ranges::to<std::vector<std::size_t>>() |       //
                               ranges::actions::sort |                        //
                               ranges::actions::unique;

  auto full_field_unique_ids = cells_above_minimum_threshold_view |            //
                               ranges::views::transform(coord_to_hash) |      //
                               ranges::views::transform(hash_to_id) |         //
                               ranges::to<std::vector<std::size_t>>() |       //
                               ranges::actions::sort |                        //
                               ranges::actions::unique;

  // check that each quadrant receives its own cluster id, and that
  // there are four clusters in total
  EXPECT_EQ(quadrant_1_unique_ids.size(), 1);
  EXPECT_EQ(quadrant_2_unique_ids.size(), 1);
  EXPECT_EQ(quadrant_3_unique_ids.size(), 1);
  EXPECT_EQ(quadrant_4_unique_ids.size(), 1);

  EXPECT_EQ(full_field_unique_ids.size(), 4);
}

TEST_F(ClusterBasedEstimationDetailTesting, ClusterStateEstimationStep) {
  const double k_field_side = 36.0;

  // create a particle set with four independent weight peaks
  auto particles = make_particle_multicluster_dataset(0.0, k_field_side, 0.0, k_field_side, 1.0);

  const auto clusters =
      ParticleClusterizer{ParticleClusterizerParam{kSpatialHashResolution, kAngularHashResolution, 0.9}}(
          beluga::views::states(particles), beluga::views::weights(particles));

  auto per_cluster_estimates =
      estimate_clusters(beluga::views::states(particles), beluga::views::weights(particles), clusters);

  // check that the number of clusters is correct
  ASSERT_EQ(per_cluster_estimates.size(), 4);

  // order by increasing weight
  ranges::sort(per_cluster_estimates, std::less{}, [](const auto& e) { return e.weight; });

  // check that the cluster means were found in the expected order,
  // with the upper-right peak being the heaviest
  EXPECT_THAT(per_cluster_estimates[0].mean, SE2Near(SO2d{0.0}, Vector2d{9.0, 9.0}, kTolerance));
  EXPECT_THAT(per_cluster_estimates[1].mean, SE2Near(SO2d{0.0}, Vector2d{27.0, 9.0}, kTolerance));
  EXPECT_THAT(per_cluster_estimates[2].mean, SE2Near(SO2d{0.0}, Vector2d{9.0, 27.0}, kTolerance));
  EXPECT_THAT(per_cluster_estimates[3].mean, SE2Near(SO2d{0.0}, Vector2d{27.0, 27.0}, kTolerance));
}

TEST_F(ClusterBasedEstimationDetailTesting, ClusterEstimation) {
  // test that the weights have an effect by favoring some states over others
  const auto states = std::vector{
      SE2d{SO2d{Sophus::Constants<double>::pi() / 6}, Vector2d{0.0, -3.0}},  //
      SE2d{SO2d{Sophus::Constants<double>::pi() / 2}, Vector2d{1.0, -2.0}},  //
      SE2d{SO2d{Sophus::Constants<double>::pi() / 3}, Vector2d{2.0, -1.0}},  //
      SE2d{SO2d{Sophus::Constants<double>::pi() / 2}, Vector2d{1.0, -2.0}},  //
      SE2d{SO2d{Sophus::Constants<double>::pi() / 6}, Vector2d{2.0, -3.0}},  //
      SE2d{SO2d{Sophus::Constants<double>::pi() / 2}, Vector2d{3.0, -2.0}},  //
      SE2d{SO2d{Sophus::Constants<double>::pi() / 3}, Vector2d{4.0, -2.0}},  //
      SE2d{SO2d{Sophus::Constants<double>::pi() / 2}, Vector2d{0.0, -3.0}},  //
  };

  // cluster 3 ties cluster 0 for the largest total weight, but it will be
  // ignored because it has only a single particle; that leaves cluster 0 as
  // the heaviest
  const auto weights = std::vector{0.5, 0.5, 0.2, 0.3, 0.3, 0.2, 0.2, 1.0};
  const auto clusters = std::vector{0, 0, 1, 2, 2, 1, 1, 3};

  const auto particles = ranges::views::zip(states, weights, clusters);

  auto cluster_0_particles =
      particles | ranges::views::cache1 | ranges::views::filter([](const auto& p) { return std::get<2>(p) == 0; });

  auto cluster_0_states = cluster_0_particles | beluga::views::elements<0>;
  auto cluster_0_weights = cluster_0_particles | beluga::views::elements<1>;

  const auto [expected_pose, expected_covariance] = beluga::estimate(cluster_0_states, cluster_0_weights);
  const auto per_cluster_estimates = beluga::estimate_clusters(states, weights, clusters);

  ASSERT_EQ(per_cluster_estimates.size(), 3);  // cluster 3 should be ignored because it has only one particle
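  // (presumably because a lone particle yields a degenerate covariance estimate)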

  const auto [_, pose, covariance] =
      *ranges::max_element(per_cluster_estimates, std::less{}, [](const auto& e) { return e.weight; });

  constexpr double kTolerance = 0.001;

  ASSERT_THAT(pose, SE2Near(expected_pose.so2(), expected_pose.translation(), kTolerance));
  ASSERT_THAT(covariance.col(0).eval(), Vector3Near(expected_covariance.col(0).eval(), kTolerance));
  ASSERT_THAT(covariance.col(1).eval(), Vector3Near(expected_covariance.col(1).eval(), kTolerance));
  ASSERT_THAT(covariance.col(2).eval(), Vector3Near(expected_covariance.col(2).eval(), kTolerance));
}

TEST_F(ClusterBasedEstimationDetailTesting, HeaviestClusterSelectionTest) {
  const auto particles = make_particle_multicluster_dataset(-2.0, +2.0, -2.0, +2.0, 0.025);

  // determine the expected mean and covariance of the highest-weight cluster,
  // which lies in the upper-right quadrant of the dataset
  const auto max_peak_filter = [](const auto& s) { return s.translation().x() >= 0.0 && s.translation().y() >= 0.0; };
  const auto mask_filter = [](const auto& sample) { return std::get<1>(sample); };

  auto max_peak_mask = beluga::views::states(particles) | ranges::views::transform(max_peak_filter);
  auto max_peak_masked_states = ranges::views::zip(beluga::views::states(particles), max_peak_mask) |
                                ranges::views::filter(mask_filter) | beluga::views::elements<0>;
  auto max_peak_masked_weights = ranges::views::zip(beluga::views::weights(particles), max_peak_mask) |
                                 ranges::views::filter(mask_filter) | beluga::views::elements<0>;

  const auto [expected_pose, expected_covariance] = beluga::estimate(max_peak_masked_states, max_peak_masked_weights);

  const auto [pose, covariance] =
      beluga::cluster_based_estimate(beluga::views::states(particles), beluga::views::weights(particles));

  ASSERT_THAT(pose, SE2Near(expected_pose.so2(), expected_pose.translation(), kTolerance));
  ASSERT_NEAR(covariance(0, 0), expected_covariance(0, 0), 0.001);
  ASSERT_NEAR(covariance(0, 1), expected_covariance(0, 1), 0.001);
  ASSERT_NEAR(covariance(0, 2), expected_covariance(0, 2), 0.001);
  ASSERT_NEAR(covariance(1, 0), expected_covariance(1, 0), 0.001);
  ASSERT_NEAR(covariance(1, 1), expected_covariance(1, 1), 0.001);
  ASSERT_NEAR(covariance(1, 2), expected_covariance(1, 2), 0.001);
  ASSERT_NEAR(covariance(2, 0), expected_covariance(2, 0), 0.001);
  ASSERT_NEAR(covariance(2, 1), expected_covariance(2, 1), 0.001);
  ASSERT_NEAR(covariance(2, 2), expected_covariance(2, 2), 0.001);
}

TEST_F(ClusterBasedEstimationDetailTesting, NightmareDistributionTest) {
  // particles so far apart that they are isolated and will therefore form four
  // separate single-particle clusters
  const auto states = std::vector{
      SE2d{SO2d{0.0}, Vector2d{-10.0, -10.0}},  //
      SE2d{SO2d{0.0}, Vector2d{-10.0, +10.0}},  //
      SE2d{SO2d{0.0}, Vector2d{+10.0, -10.0}},  //
      SE2d{SO2d{0.0}, Vector2d{+10.0, +10.0}}};
  const auto weights = std::vector<beluga::Weight>{0.2, 0.2, 0.2, 0.2};

  // in this case the clustering algorithm cannot group the particles, so the
  // estimator falls back to the mean and covariance of the whole particle set
  const auto [expected_pose, expected_covariance] = beluga::estimate(states, weights);

  const auto [pose, covariance] = beluga::cluster_based_estimate(states, weights);

  ASSERT_THAT(pose, SE2Near(expected_pose.so2(), expected_pose.translation(), kTolerance));
  ASSERT_NEAR(covariance(0, 0), expected_covariance(0, 0), 0.001);
  ASSERT_NEAR(covariance(0, 1), expected_covariance(0, 1), 0.001);
  ASSERT_NEAR(covariance(0, 2), expected_covariance(0, 2), 0.001);
  ASSERT_NEAR(covariance(1, 0), expected_covariance(1, 0), 0.001);
  ASSERT_NEAR(covariance(1, 1), expected_covariance(1, 1), 0.001);
  ASSERT_NEAR(covariance(1, 2), expected_covariance(1, 2), 0.001);
  ASSERT_NEAR(covariance(2, 0), expected_covariance(2, 0), 0.001);
  ASSERT_NEAR(covariance(2, 1), expected_covariance(2, 1), 0.001);
  ASSERT_NEAR(covariance(2, 2), expected_covariance(2, 2), 0.001);
}

}  // namespace

}  // namespace beluga