testHybridEstimation.cpp
Go to the documentation of this file.
1 /* ----------------------------------------------------------------------------
2 
3  * GTSAM Copyright 2010, Georgia Tech Research Corporation,
4  * Atlanta, Georgia 30332-0415
5  * All Rights Reserved
6  * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
7 
8  * See LICENSE for the license information
9 
10  * -------------------------------------------------------------------------- */
11 
19 #include <gtsam/geometry/Pose2.h>
20 #include <gtsam/geometry/Pose3.h>
27 #include <gtsam/inference/Symbol.h>
36 
37 // Include for test suite
39 
40 #include <string>
41 
42 #include "Switching.h"
43 
44 using namespace std;
45 using namespace gtsam;
46 
49 
// Shared fixture for the HybridEstimation tests: a 1D switching robot with
// 21 measurements and the matching ground-truth mode sequence (20 modes).
50 namespace estimation_fixture {
51 std::vector<double> measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6,
52  7, 8, 9, 9, 9, 10, 11, 11, 11, 11};
53 // Ground truth discrete seq
54 std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0,
55  1, 1, 1, 0, 0, 1, 1, 0, 0, 0};
56 
// Build a Switching example and seed the caller's graph/initial values with
// the priors on M(0) and X(0), returning the Switching object.
// NOTE(review): this listing was extracted from generated documentation;
// original lines 57, 61, 66 and 69 (the function's signature line, its
// trailing `graph`/`initial` parameters, and the push_back of the priors)
// were folded out. Per the cross-reference index the full signature is:
//   Switching InitializeEstimationProblem(const size_t K,
//       const double between_sigma, const double measurement_sigma,
//       const std::vector<double>& measurements,
//       const std::string& transitionProbabilityTable,
//       HybridNonlinearFactorGraph& graph, Values& initial)
58  const size_t K, const double between_sigma, const double measurement_sigma,
59  const std::vector<double>& measurements,
60  const std::string& transitionProbabilityTable,
62  Switching switching(K, between_sigma, measurement_sigma, measurements,
63  transitionProbabilityTable);
64 
65  // Add prior on M(0)
67 
68  // Add the X(0) prior
70  initial.insert(X(0), switching.linearizationPoint.at<double>(X(0)));
71 
72  return switching;
73 }
74 
75 } // namespace estimation_fixture
76 
// Batch (non-incremental) hybrid elimination on a K=6 switching example:
// eliminate all continuous X(k) before the discrete modes M(k), then check
// that the MPE mode sequence and the optimized continuous values match the
// ground truth designed into the measurements.
// NOTE(review): original lines 85 and 99/101 were folded out of this
// listing — presumably the declarations of `graph` (the linearized factor
// graph of `switching`) and `delta` (= bayesNet->optimize()); confirm
// against the repository source.
77 TEST(HybridEstimation, Full) {
78  size_t K = 6;
79  std::vector<double> measurements = {0, 1, 2, 2, 2, 3};
80  // Ground truth discrete seq
81  std::vector<size_t> discrete_seq = {1, 1, 0, 0, 1};
82  // Switching example of robot moving in 1D
83  // with given measurements and equal mode priors.
84  Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1");
86 
// Elimination order: all continuous keys first, then the K-1 discrete modes.
87  Ordering hybridOrdering;
88  for (size_t k = 0; k < K; k++) {
89  hybridOrdering.push_back(X(k));
90  }
91  for (size_t k = 0; k < K - 1; k++) {
92  hybridOrdering.push_back(M(k));
93  }
94 
95  HybridBayesNet::shared_ptr bayesNet = graph.eliminateSequential();
96 
// One conditional per variable: K continuous + (K-1) discrete.
97  EXPECT_LONGS_EQUAL(2 * K - 1, bayesNet->size());
98 
100 
102  Values result = initial.retract(delta.continuous());
103 
104  DiscreteValues expected_discrete;
105  for (size_t k = 0; k < K - 1; k++) {
106  expected_discrete[M(k)] = discrete_seq[k];
107  }
108  EXPECT(assert_equal(expected_discrete, delta.discrete()));
109 
// With tight measurement noise (0.1), the MAP estimate should land on the
// measurements themselves.
110  Values expected_continuous;
111  for (size_t k = 0; k < K; k++) {
112  expected_continuous.insert(X(k), measurements[k]);
113  }
114  EXPECT(assert_equal(expected_continuous, result));
115 }
116 
117 /****************************************************************************/
118 // Test approximate inference with an additional pruning step.
// Incremental estimation with HybridSmoother: factors are added one time
// step at a time, relinearized, and the discrete hypothesis tree is pruned
// to at most maxNrLeaves=3 leaves each update. The final (pruned) posterior
// must still recover the ground-truth modes and continuous states.
119 TEST(HybridEstimation, IncrementalSmoother) {
120  using namespace estimation_fixture;
121 
122  size_t K = 15;
123 
124  // Switching example of robot moving in 1D
125  // with given measurements and equal mode priors.
// NOTE(review): original lines 126/128 (declaring `graph` and the call
// `Switching switching = InitializeEstimationProblem(K, 1.0, 0.1,
// measurements, ...)` whose tail is line 129 below) were folded out of this
// listing.
127  Values initial;
129  "1/1 1/1", graph, initial);
130  HybridSmoother smoother;
131 
132  HybridGaussianFactorGraph linearized;
133 
134  constexpr size_t maxNrLeaves = 3;
135  for (size_t k = 1; k < K; k++) {
// k=1 already has the M(0) prior from the fixture, so the mode-chain factor
// is only added from k=2 on.
136  if (k > 1) graph.push_back(switching.modeChain.at(k - 1)); // Mode chain
137  graph.push_back(switching.binaryFactors.at(k - 1)); // Motion Model
138  graph.push_back(switching.unaryFactors.at(k)); // Measurement
139 
140  initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
141 
142  linearized = *graph.linearize(initial);
143  Ordering ordering = smoother.getOrdering(linearized);
144 
145  smoother.update(linearized, maxNrLeaves, ordering);
// Clear the graph; only the new time step's factors go in next iteration.
146  graph.resize(0);
147  }
148 
149  HybridValues delta = smoother.hybridBayesNet().optimize();
150 
151  Values result = initial.retract(delta.continuous());
152 
153  DiscreteValues expected_discrete;
154  for (size_t k = 0; k < K - 1; k++) {
155  expected_discrete[M(k)] = discrete_seq[k];
156  }
157  EXPECT(assert_equal(expected_discrete, delta.discrete()));
158 
159  Values expected_continuous;
160  for (size_t k = 0; k < K; k++) {
161  expected_continuous.insert(X(k), measurements[k]);
162  }
163  EXPECT(assert_equal(expected_continuous, result));
164 }
165 
166 /****************************************************************************/
167 // Test if pruned factor is set to correct error and no errors are thrown.
// Same incremental smoother loop as IncrementalSmoother but with K=8; the
// point (per the header above) is that pruned factors get a valid error
// assigned and no exceptions are thrown during the pruned updates.
168 TEST(HybridEstimation, ValidPruningError) {
169  using namespace estimation_fixture;
170 
171  size_t K = 8;
172 
// NOTE(review): original lines 173/175 (declaring `graph` and the
// InitializeEstimationProblem(...) call whose tail is line 176 below) were
// folded out of this listing.
174  Values initial;
176  "1/1 1/1", graph, initial);
177  HybridSmoother smoother;
178 
179  HybridGaussianFactorGraph linearized;
180 
181  constexpr size_t maxNrLeaves = 3;
182  for (size_t k = 1; k < K; k++) {
183  if (k > 1) graph.push_back(switching.modeChain.at(k - 1)); // Mode chain
184  graph.push_back(switching.binaryFactors.at(k - 1)); // Motion Model
185  graph.push_back(switching.unaryFactors.at(k)); // Measurement
186 
187  initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
188 
189  linearized = *graph.linearize(initial);
190  Ordering ordering = smoother.getOrdering(linearized);
191 
192  smoother.update(linearized, maxNrLeaves, ordering);
193 
// Clear the graph; only the new time step's factors go in next iteration.
194  graph.resize(0);
195  }
196 
197  HybridValues delta = smoother.hybridBayesNet().optimize();
198 
199  Values result = initial.retract(delta.continuous());
200 
201  DiscreteValues expected_discrete;
202  for (size_t k = 0; k < K - 1; k++) {
203  expected_discrete[M(k)] = discrete_seq[k];
204  }
205  EXPECT(assert_equal(expected_discrete, delta.discrete()));
206 
207  Values expected_continuous;
208  for (size_t k = 0; k < K; k++) {
209  expected_continuous.insert(X(k), measurements[k]);
210  }
211  EXPECT(assert_equal(expected_continuous, result));
212 }
213 
214 /****************************************************************************/
215 // Test approximate inference with an additional pruning step.
// Incremental estimation via HybridNonlinearISAM (Bayes-tree based), again
// pruning to maxNrLeaves=3 per update; the estimate and the discrete
// assignment must match the ground truth.
216 TEST(HybridEstimation, ISAM) {
217  using namespace estimation_fixture;
218 
219  size_t K = 15;
220 
221  // Switching example of robot moving in 1D
222  // with given measurements and equal mode priors.
// NOTE(review): original lines 223/225/227 (declaring `graph`, the
// InitializeEstimationProblem(...) call whose tail is line 226 below, and
// the HybridNonlinearISAM `isam` object) were folded out of this listing.
224  Values initial;
226  "1/1 1/1", graph, initial);
228 
229  HybridGaussianFactorGraph linearized;
230 
231  const size_t maxNrLeaves = 3;
232  for (size_t k = 1; k < K; k++) {
233  if (k > 1) graph.push_back(switching.modeChain.at(k - 1)); // Mode chain
234  graph.push_back(switching.binaryFactors.at(k - 1)); // Motion Model
235  graph.push_back(switching.unaryFactors.at(k)); // Measurement
236 
237  initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
238 
239  isam.update(graph, initial, maxNrLeaves);
240  // isam.saveGraph("NLiSAM" + std::to_string(k) + ".dot");
241  // GTSAM_PRINT(isam);
242 
// ISAM keeps the linearization points internally, so both the new-factor
// graph and the new-values container are reset each step.
243  graph.resize(0);
244  initial.clear();
245  }
246 
// NOTE(review): original line 247 (presumably `Values result =
// isam.estimate();`, used by the final EXPECT below) was folded out.
248  DiscreteValues assignment = isam.assignment();
249 
250  DiscreteValues expected_discrete;
251  for (size_t k = 0; k < K - 1; k++) {
252  expected_discrete[M(k)] = discrete_seq[k];
253  }
254  EXPECT(assert_equal(expected_discrete, assignment));
255 
256  Values expected_continuous;
257  for (size_t k = 0; k < K; k++) {
258  expected_continuous.insert(X(k), measurements[k]);
259  }
260  EXPECT(assert_equal(expected_continuous, result));
261 }
262 
// Helper: build the tree of unnormalized posterior probabilities
// P'(M | Z) = exp(-error) over all discrete mode assignments, by first
// eliminating only the continuous variables and then evaluating the graph
// error at the per-assignment continuous optimum.
// NOTE(review): the function's opening lines were folded out of this
// listing; per the cross-reference index the signature is
//   AlgebraicDecisionTree<Key> GetProbPrimeTree(
//       const HybridGaussianFactorGraph& graph)
275  Ordering continuous(graph.continuousKeySet());
276  const auto [bayesNet, remainingGraph] =
277  graph.eliminatePartialSequential(continuous)
278 
279  auto last_conditional = bayesNet->at(bayesNet->size() - 1);
280  DiscreteKeys discrete_keys = last_conditional->discreteKeys();
281 
282  const std::vector<DiscreteValues> assignments =
283  DiscreteValues::CartesianProduct(discrete_keys);
284 
// Reverse the key order before building the DecisionTrees below —
// presumably so the tree's key order matches the enumeration order of
// CartesianProduct; confirm against DecisionTree's constructor contract.
285  std::reverse(discrete_keys.begin(), discrete_keys.end());
286 
// For every mode assignment, solve for the continuous optimum delta.
287  vector<VectorValues::shared_ptr> vector_values;
288  for (const DiscreteValues& assignment : assignments) {
289  VectorValues values = bayesNet->optimize(assignment);
290  vector_values.push_back(std::make_shared<VectorValues>(values));
291  }
292  DecisionTree<Key, VectorValues::shared_ptr> delta_tree(discrete_keys,
293  vector_values);
294 
295  // Get the probPrime tree with the correct leaf probabilities
296  std::vector<double> probPrimes;
297  for (const DiscreteValues& assignment : assignments) {
298  VectorValues delta = *delta_tree(assignment);
299 
300  // If VectorValues is empty, it means this is a pruned branch.
301  // Set the probPrime to 0.0.
302  if (delta.size() == 0) {
303  probPrimes.push_back(0.0);
304  continue;
305  }
306 
// Unnormalized probability of this branch: exp(-error) of the full graph
// evaluated at its own continuous optimum.
307  double error = graph.error({delta, assignment});
308  probPrimes.push_back(exp(-error));
309  }
310  AlgebraicDecisionTree<Key> probPrimeTree(discrete_keys, probPrimes);
311  return probPrimeTree;
312 }
313 
314 /*********************************************************************************
315  * Test for correctness of different branches of the P'(Continuous | Discrete).
316  * The values should match those of P'(Continuous) for each discrete mode.
317  ********************************************************************************/
318 TEST(HybridEstimation, Probability) {
319  using namespace estimation_fixture;
320 
321  constexpr size_t K = 4;
322  double between_sigma = 1.0, measurement_sigma = 0.1;
323 
324  // Switching example of robot moving in 1D with
325  // given measurements and equal mode priors.
326  Switching switching(K, between_sigma, measurement_sigma, measurements,
327  "1/1 1/1");
// NOTE(review): original line 328 (declaring `graph`, presumably
// `switching.linearizedFactorGraph()` per the cross-reference index) was
// folded out of this listing.
329 
330  // Continuous elimination
331  Ordering continuous_ordering(graph.continuousKeySet());
332  auto [bayesNet, discreteGraph] =
333  graph.eliminatePartialSequential(continuous_ordering);
334 
335  // Discrete elimination
336  Ordering discrete_ordering(graph.discreteKeySet());
337  auto discreteBayesNet = discreteGraph->eliminateSequential(discrete_ordering);
338 
339  // Add the discrete conditionals to make it a full bayes net.
340  for (auto discrete_conditional : *discreteBayesNet) {
341  bayesNet->add(discrete_conditional);
342  }
343 
344  HybridValues hybrid_values = bayesNet->optimize();
345 
346  // This is the correct sequence as designed
347  DiscreteValues expectedSequence{{M(0), 1}, {M(1), 1}, {M(2), 0}};
348  EXPECT(assert_equal(expectedSequence, hybrid_values.discrete()));
349 }
350 
351 /****************************************************************************/
// Multifrontal (Bayes-tree) variant of the Probability test: eliminate the
// continuous variables into a HybridBayesTree, eliminate the discrete
// remainder into its own Bayes tree, graft the continuous cliques under the
// discrete root, and check the optimized mode sequence.
357 TEST(HybridEstimation, ProbabilityMultifrontal) {
358  using namespace estimation_fixture;
359 
360  constexpr size_t K = 4;
361 
362  double between_sigma = 1.0, measurement_sigma = 0.1;
363 
364  // Switching example of robot moving in 1D with given measurements and equal
365  // mode priors.
366  Switching switching(K, between_sigma, measurement_sigma, measurements,
367  "1/1 1/1");
// NOTE(review): original line 368 (declaring `graph`) was folded out of
// this listing.
369 
370  // Get the tree of unnormalized probabilities for each mode sequence.
// NOTE(review): `expected_probPrimeTree` is not referenced in the visible
// lines — the comparison against the Bayes-tree probabilities was likely
// folded out; confirm against the repository source.
371  AlgebraicDecisionTree<Key> expected_probPrimeTree = GetProbPrimeTree(graph);
372 
373  // Eliminate continuous
374  Ordering continuous_ordering(graph.continuousKeySet());
375  const auto [bayesTree, discreteGraph] =
376  graph.eliminatePartialMultifrontal(continuous_ordering);
377 
378  // Get the last continuous conditional which will have all the discrete keys
379  Key last_continuous_key =
380  continuous_ordering.at(continuous_ordering.size() - 1);
381  auto last_conditional = (*bayesTree)[last_continuous_key]->conditional();
382  DiscreteKeys discrete_keys = last_conditional->discreteKeys();
383 
384  Ordering discrete(graph.discreteKeySet());
385  auto discreteBayesTree = discreteGraph->eliminateMultifrontal(discrete);
386 
387  EXPECT_LONGS_EQUAL(1, discreteBayesTree->size());
388  // DiscreteBayesTree should have only 1 clique
389  auto discrete_clique = (*discreteBayesTree)[discrete.at(0)];
390 
391  std::set<HybridBayesTreeClique::shared_ptr> clique_set;
392  for (auto node : bayesTree->nodes()) {
393  clique_set.insert(node.second);
394  }
395 
396  // Set the root of the bayes tree as the discrete clique
397  for (auto clique : clique_set) {
// A clique whose parents are exactly the discrete frontals hangs directly
// off the discrete root; all others are re-attached under their original
// parent.
398  if (clique->conditional()->parents() ==
399  discrete_clique->conditional()->frontals()) {
400  discreteBayesTree->addClique(clique, discrete_clique);
401 
402  } else {
403  // Remove the clique from the children of the parents since
404  // it will get added again in addClique.
405  auto clique_it = std::find(clique->parent()->children.begin(),
406  clique->parent()->children.end(), clique);
407  clique->parent()->children.erase(clique_it);
408  discreteBayesTree->addClique(clique, clique->parent());
409  }
410  }
411 
412  HybridValues hybrid_values = discreteBayesTree->optimize();
413 
414  // This is the correct sequence as designed
415  DiscreteValues expectedSequence{{M(0), 1}, {M(1), 1}, {M(2), 0}};
416  EXPECT(assert_equal(expectedSequence, hybrid_values.discrete()));
417 }
418 
419 /*********************************************************************************
420  // Create a hybrid nonlinear factor graph f(x0, x1, m0; z0, z1)
421  ********************************************************************************/
// Build the small hybrid nonlinear factor graph f(x0, x1, m0; z0, z1):
// two unary "measurement" factors plus one two-component hybrid motion
// factor (zero motion vs. unit motion) switched by m0.
// NOTE(review): per the cross-reference index the folded-out signature
// (original lines 422-423) is
//   static HybridNonlinearFactorGraph CreateHybridNonlinearFactorGraph()
// and lines 429-430 / 440 — the declaration of `nfg`, the emplacement of
// the two measurement factors, and the emplacement of the
// HybridNonlinearFactor built from `components` — were also folded out.
424 
425  constexpr double sigma = 0.5; // measurement noise
426  const auto noise_model = noiseModel::Isotropic::Sigma(1, sigma);
427 
428  // Add "measurement" factors:
431 
432  // Add hybrid nonlinear factor:
433  DiscreteKey m(M(0), 2);
434  const auto zero_motion =
435  std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
436  const auto one_motion =
437  std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
438  std::vector<NoiseModelFactor::shared_ptr> components = {zero_motion,
439  one_motion};
441 
442  return nfg;
443 }
444 
445 /*********************************************************************************
446  // Create a hybrid linear factor graph f(x0, x1, m0; z0, z1)
447  ********************************************************************************/
// Linearize the nonlinear hybrid graph at (x0, x1) = (z0, z1) = (0, 1).
// NOTE(review): per the cross-reference index the folded-out signature
// (original lines 448-449) is
//   static HybridGaussianFactorGraph::shared_ptr
//   CreateHybridGaussianFactorGraph()
// and `nfg` is presumably obtained from CreateHybridNonlinearFactorGraph()
// on a folded-out line — confirm against the repository source.
450 
451  Values initial;
452  double z0 = 0.0, z1 = 1.0;
453  initial.insert<double>(X(0), z0);
454  initial.insert<double>(X(1), z1);
455  return nfg.linearize(initial);
456 }
457 
458 /*********************************************************************************
459  * Do hybrid elimination and do regression test on discrete conditional.
460  ********************************************************************************/
// Regression: the discrete posterior on m0 from hybrid sequential
// elimination must match the recorded value and must not depend on the
// continuous elimination order.
// NOTE(review): original line 463 (creating `fg`, presumably via
// CreateHybridGaussianFactorGraph()) was folded out of this listing.
461 TEST(HybridEstimation, EliminateSequentialRegression) {
462  // Create the factor graph from the nonlinear factor graph.
464 
465  // Create expected discrete conditional on m0.
466  DiscreteKey m(M(0), 2);
467  DiscreteConditional expected(m % "0.51341712/1"); // regression
468 
469  // Eliminate into BN using one ordering
470  const Ordering ordering1{X(0), X(1), M(0)};
471  HybridBayesNet::shared_ptr bn1 = fg->eliminateSequential(ordering1);
472 
473  // Check that the discrete conditional matches the expected.
474  auto dc1 = bn1->back()->asDiscrete();
475  EXPECT(assert_equal(expected, *dc1, 1e-9));
476 
477  // Eliminate into BN using a different ordering
// Fixed: ordering2 previously duplicated ordering1 exactly, so this second
// regression check was redundant. Swapping X(0)/X(1) exercises a genuinely
// different continuous elimination order, as the comment above intends.
478  const Ordering ordering2{X(1), X(0), M(0)};
479  HybridBayesNet::shared_ptr bn2 = fg->eliminateSequential(ordering2);
480 
481  // Check that the discrete conditional matches the expected.
482  auto dc2 = bn2->back()->asDiscrete();
483  EXPECT(assert_equal(expected, *dc2, 1e-9));
484 }
485 
486 /*********************************************************************************
487  * Test for correctness via sampling.
488  *
489  * Compute the conditional P(x0, m0, x1| z0, z1)
490  * with measurements z0, z1. To do so, we:
491  * 1. Start with the corresponding Factor Graph `FG`.
492  * 2. Eliminate the factor graph into a Bayes Net `BN`.
493  * 3. Sample from the Bayes Net.
494  * 4. Check that the ratio `BN(x)/FG(x) = constant` for all samples `x`.
495  ********************************************************************************/
// Correctness via sampling: for samples x drawn from the Bayes net obtained
// by eliminating the factor graph, the ratio BN(x)/FG(x) must be a constant
// (the normalization constant), since elimination only rescales the density.
496 TEST(HybridEstimation, CorrectnessViaSampling) {
497  // 1. Create the factor graph from the nonlinear factor graph.
498  const auto fg = CreateHybridGaussianFactorGraph();
499 
500  // 2. Eliminate into BN
501  const HybridBayesNet::shared_ptr bn = fg->eliminateSequential();
502 
503  // Set up sampling
// Fixed seed so the test is deterministic.
504  std::mt19937_64 rng(11);
505 
506  // Compute the ratio between the Bayes net density and the factor graph's
507  // unnormalized density at a given sample.
508  auto compute_ratio = [&](const HybridValues& sample) -> double {
509  return bn->evaluate(sample) / fg->probPrime(sample);
510  };
511 
512  // The error evaluated by the factor graph and the Bayes net should differ by
513  // the normalizing term computed via the Bayes net determinant.
514  const HybridValues sample = bn->sample(&rng);
515  double expected_ratio = compute_ratio(sample);
516 
517  // 3. Do sampling
518  constexpr int num_samples = 10;
519  for (size_t i = 0; i < num_samples; i++) {
520  // Sample from the bayes net
521  const HybridValues sample = bn->sample(&rng);
522 
523  // 4. Check that the ratio is constant.
524  EXPECT_DOUBLES_EQUAL(expected_ratio, compute_ratio(sample), 1e-6);
525  }
526 }
526 
527 /****************************************************************************/
// Mode selection with a 1-D hybrid motion factor: a tight (sigma=0.5) and a
// loose (sigma=5.0) motion model compete; with both priors at 0 the tight
// model (mode 1) should win. The second half rebuilds the same problem as a
// HybridBayesNet and checks that eliminating its induced factor graph gives
// the same posterior.
// NOTE(review): original lines 529, 548, 560 and 577 were folded out of
// this listing — presumably the declarations of `graph`, of the
// HybridNonlinearFactor `mf(mode, components)`, of `delta`
// (= bayesNet->optimize()), and of the VectorValues `vv`; confirm against
// the repository source.
528 TEST(HybridEstimation, ModeSelection) {
530  Values initial;
531 
532  auto measurement_model = noiseModel::Isotropic::Sigma(1, 0.1);
533  auto motion_model = noiseModel::Isotropic::Sigma(1, 1.0);
534 
535  graph.emplace_shared<PriorFactor<double>>(X(0), 0.0, measurement_model);
536  graph.emplace_shared<PriorFactor<double>>(X(1), 0.0, measurement_model);
537 
538  // The size of the noise model
539  size_t d = 1;
540  double noise_tight = 0.5, noise_loose = 5.0;
541 
542  auto model0 = std::make_shared<MotionModel>(
543  X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)),
544  model1 = std::make_shared<MotionModel>(
545  X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight));
546  std::vector<NoiseModelFactor::shared_ptr> components = {model0, model1};
547 
549 
550  initial.insert(X(0), 0.0);
551  initial.insert(X(1), 0.0);
552 
553  auto gmf = mf.linearize(initial);
554  graph.add(gmf);
555 
556  auto gfg = graph.linearize(initial);
557 
558  HybridBayesNet::shared_ptr bayesNet = gfg->eliminateSequential();
559 
// Mode 1 is the tight model, which best explains identical x0 and x1.
561  EXPECT_LONGS_EQUAL(1, delta.discrete().at(M(0)));
562 
563  /**************************************************************/
// Equivalent generative model: z measures x1 - x0 under a mode-dependent
// noise; conditioning on z = 0 should reproduce the posterior above.
564  HybridBayesNet bn;
565  const DiscreteKey mode(M(0), 2);
566 
567  bn.push_back(
568  GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
569  bn.push_back(
570  GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
571 
572  std::vector<std::pair<Vector, double>> parameters{{Z_1x1, noise_loose},
573  {Z_1x1, noise_tight}};
574  bn.emplace_shared<HybridGaussianConditional>(mode, Z(0), I_1x1, X(0), -I_1x1,
575  X(1), parameters);
576 
578  vv.insert(Z(0), Z_1x1);
579 
580  auto fg = bn.toFactorGraph(vv);
581  auto expected_posterior = fg.eliminateSequential();
582 
583  EXPECT(assert_equal(*expected_posterior, *bayesNet, 1e-6));
584 }
585 
586 /****************************************************************************/
// Vector-valued (d=3) version of ModeSelection, built in the opposite
// order: first the generative HybridBayesNet conditioned on z = 0, then the
// factor-graph formulation; both eliminations must agree.
// NOTE(review): original lines 607, 616, 627 and 631 were folded out of
// this listing — presumably the declarations of `vv`, of `graph`, the
// start of `model1 = std::make_shared<BetweenFactor<Vector3>>(` (whose
// argument tail is line 628 below), and of the HybridNonlinearFactor
// `mf(mode, components)`; confirm against the repository source.
587 TEST(HybridEstimation, ModeSelection2) {
588  using symbol_shorthand::Z;
589 
590  // The size of the noise model
591  size_t d = 3;
592  double noise_tight = 0.5, noise_loose = 5.0;
593 
594  HybridBayesNet bn;
595  const DiscreteKey mode(M(0), 2);
596 
597  bn.push_back(
598  GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1));
599  bn.push_back(
600  GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));
601 
602  std::vector<std::pair<Vector, double>> parameters{{Z_3x1, noise_loose},
603  {Z_3x1, noise_tight}};
604  bn.emplace_shared<HybridGaussianConditional>(mode, Z(0), I_3x3, X(0), -I_3x3,
605  X(1), parameters);
606 
608  vv.insert(Z(0), Z_3x1);
609 
610  auto fg = bn.toFactorGraph(vv);
611 
612  auto expected_posterior = fg.eliminateSequential();
613 
614  // =====================================
// Factor-graph formulation of the same problem.
615 
617  Values initial;
618 
619  auto measurement_model = noiseModel::Isotropic::Sigma(d, 0.1);
620  auto motion_model = noiseModel::Isotropic::Sigma(d, 1.0);
621 
622  graph.emplace_shared<PriorFactor<Vector3>>(X(0), Z_3x1, measurement_model);
623  graph.emplace_shared<PriorFactor<Vector3>>(X(1), Z_3x1, measurement_model);
624 
625  auto model0 = std::make_shared<BetweenFactor<Vector3>>(
626  X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_loose)),
628  X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight));
629  std::vector<NoiseModelFactor::shared_ptr> components = {model0, model1};
630 
632 
633  initial.insert<Vector3>(X(0), Z_3x1);
634  initial.insert<Vector3>(X(1), Z_3x1);
635 
636  auto gmf = mf.linearize(initial);
637  graph.add(gmf);
638 
639  auto gfg = graph.linearize(initial);
640 
641  HybridBayesNet::shared_ptr bayesNet = gfg->eliminateSequential();
642 
643  EXPECT(assert_equal(*expected_posterior, *bayesNet, 1e-6));
644 }
645 
646 /* ************************************************************************* */
// Entry point: run every TEST registered above via CppUnitLite.
647 int main() {
648  TestResult tr;
649  return TestRegistry::runAllTests(tr);
650 }
651 /* ************************************************************************* */
TestRegistry::runAllTests
static int runAllTests(TestResult &result)
Definition: TestRegistry.cpp:27
gtsam::HybridSmoother::hybridBayesNet
const HybridBayesNet & hybridBayesNet() const
Return the Bayes Net posterior.
Definition: HybridSmoother.cpp:143
gtsam::HybridGaussianFactorGraph::shared_ptr
std::shared_ptr< This > shared_ptr
shared_ptr to This
Definition: HybridGaussianFactorGraph.h:120
gtsam::HybridSmoother::update
void update(HybridGaussianFactorGraph graph, std::optional< size_t > maxNrLeaves={}, const std::optional< Ordering > given_ordering={})
Definition: HybridSmoother.cpp:59
gtsam::EliminateableFactorGraph::eliminateSequential
std::shared_ptr< BayesNetType > eliminateSequential(OptionalOrderingType orderingType={}, const Eliminate &function=EliminationTraitsType::DefaultEliminate, OptionalVariableIndex variableIndex={}) const
Definition: EliminateableFactorGraph-inst.h:29
switching3::switching
const Switching switching(3)
Pose2.h
2D Pose
DiscreteBayesNet.h
GaussianFactorGraph.h
Linear Factor Graph where all factors are Gaussians.
gtsam::HybridValues
Definition: HybridValues.h:37
rng
static std::mt19937 rng
Definition: timeFactorOverhead.cpp:31
CreateHybridGaussianFactorGraph
static HybridGaussianFactorGraph::shared_ptr CreateHybridGaussianFactorGraph()
Definition: testHybridEstimation.cpp:448
e
Array< double, 1, 3 > e(1./3., 0.5, 2.)
d
static const double d[K][N]
Definition: igam.h:11
EXPECT_LONGS_EQUAL
#define EXPECT_LONGS_EQUAL(expected, actual)
Definition: Test.h:154
EXPECT
#define EXPECT(condition)
Definition: Test.h:150
gtsam::Z_3x1
static const Eigen::MatrixBase< Vector3 >::ConstantReturnType Z_3x1
Definition: Vector.h:46
HybridNonlinearFactorGraph.h
Nonlinear hybrid factor graph that uses type erasure.
main
int main()
Definition: testHybridEstimation.cpp:647
TestHarness.h
gtsam::HybridBayesNet
Definition: HybridBayesNet.h:37
gtsam::HybridNonlinearFactorGraph
Definition: HybridNonlinearFactorGraph.h:33
gtsam::VectorValues::insert
iterator insert(const std::pair< Key, Vector > &key_value)
Definition: VectorValues.cpp:90
z1
Point2 z1
Definition: testTriangulationFactor.cpp:45
initial
Values initial
Definition: OdometryOptimize.cpp:2
Switching.h
estimation_fixture::discrete_seq
std::vector< size_t > discrete_seq
Definition: testHybridEstimation.cpp:54
gtsam::PriorFactor
Definition: nonlinear/PriorFactor.h:30
HybridBayesNet.h
A Bayes net of Gaussian Conditionals indexed by discrete keys.
different_sigmas::values
HybridValues values
Definition: testHybridBayesNet.cpp:245
gtsam::NonlinearFactorGraph::linearize
std::shared_ptr< GaussianFactorGraph > linearize(const Values &linearizationPoint) const
Linearize a nonlinear factor graph.
Definition: NonlinearFactorGraph.cpp:239
X
#define X
Definition: icosphere.cpp:20
gtsam::DiscreteKeys
DiscreteKeys is a set of keys that can be assembled using the & operator.
Definition: DiscreteKey.h:41
GaussianBayesNet.h
Chordal Bayes Net, the result of eliminating a factor graph.
vv
static const VectorValues vv
Definition: testHybridGaussianConditional.cpp:44
gtsam::Vector3
Eigen::Vector3d Vector3
Definition: Vector.h:43
exp
const EIGEN_DEVICE_FUNC ExpReturnType exp() const
Definition: ArrayCwiseUnaryOps.h:97
result
Values result
Definition: OdometryOptimize.cpp:8
HybridGaussianFactor.h
A set of GaussianFactors, indexed by a set of discrete keys.
test_motion::noise_model
auto noise_model
Definition: testHybridNonlinearFactorGraph.cpp:119
gtsam::Switching::linearizedFactorGraph
HybridGaussianFactorGraph linearizedFactorGraph() const
Get the full linear factor graph.
Definition: Switching.h:220
HybridSmoother.h
An incremental smoother for hybrid factor graphs.
GetProbPrimeTree
AlgebraicDecisionTree< Key > GetProbPrimeTree(const HybridGaussianFactorGraph &graph)
Helper method to get the tree of unnormalized probabilities as per the elimination scheme.
Definition: testHybridEstimation.cpp:273
gtsam::NonlinearISAM::update
void update(const NonlinearFactorGraph &newFactors, const Values &initialValues)
Definition: NonlinearISAM.cpp:35
sampling::sigma
static const double sigma
Definition: testGaussianBayesNet.cpp:170
gtsam::AlgebraicDecisionTree< Key >
gtsam::Switching
ϕ(X(0)) .. ϕ(X(k),X(k+1)) .. ϕ(X(k);z_k) .. ϕ(M(0)) .. ϕ(M(K-3),M(K-2))
Definition: Switching.h:124
gtsam::FactorGraph::at
const sharedFactor at(size_t i) const
Definition: FactorGraph.h:306
gtsam::HybridBayesNet::optimize
HybridValues optimize() const
Solve the HybridBayesNet by first computing the MPE of all the discrete variables and then optimizing...
Definition: HybridBayesNet.cpp:123
gtsam::NonlinearFactorGraph::error
double error(const Values &values) const
Definition: NonlinearFactorGraph.cpp:170
gtsam::Values::at
const ValueType at(Key j) const
Definition: Values-inl.h:261
PriorFactor.h
estimation_fixture::InitializeEstimationProblem
Switching InitializeEstimationProblem(const size_t K, const double between_sigma, const double measurement_sigma, const std::vector< double > &measurements, const std::string &transitionProbabilityTable, HybridNonlinearFactorGraph &graph, Values &initial)
Definition: testHybridEstimation.cpp:57
gtsam::FactorGraph::resize
virtual void resize(size_t size)
Definition: FactorGraph.h:367
different_sigmas::bayesNet
const HybridBayesNet bayesNet
Definition: testHybridBayesNet.cpp:242
gtsam.examples.PlanarManipulatorExample.delta
def delta(g0, g1)
Definition: PlanarManipulatorExample.py:45
gtsam::VectorValues
Definition: VectorValues.h:74
BetweenFactor.h
mode
static const DiscreteKey mode(modeKey, 2)
parameters
static ConjugateGradientParameters parameters
Definition: testIterative.cpp:33
isam
NonlinearISAM isam(relinearizeInterval)
gtsam::make_shared
gtsam::enable_if_t< needs_eigen_aligned_allocator< T >::value, std::shared_ptr< T > > make_shared(Args &&... args)
Definition: make_shared.h:56
cholesky::expected
Matrix expected
Definition: testMatrix.cpp:971
gtsam::HybridGaussianConditional
A conditional of gaussian conditionals indexed by discrete variables, as part of a Bayes Network....
Definition: HybridGaussianConditional.h:54
gtsam::HybridGaussianFactorGraph
Definition: HybridGaussianFactorGraph.h:106
mf
Matrix2f mf
Definition: MatrixBase_cast.cpp:2
gtsam::HybridNonlinearISAM
Definition: HybridNonlinearISAM.h:29
gtsam::NonlinearISAM::estimate
Values estimate() const
Definition: NonlinearISAM.cpp:80
Symbol.h
estimation_fixture
Definition: testHybridEstimation.cpp:50
TEST
TEST(HybridEstimation, Full)
Definition: testHybridEstimation.cpp:77
m
Matrix3f m
Definition: AngleAxis_mimic_euler.cpp:1
gtsam::HybridSmoother::getOrdering
Ordering getOrdering(const HybridGaussianFactorGraph &newFactors)
Definition: HybridSmoother.cpp:27
CreateHybridNonlinearFactorGraph
static HybridNonlinearFactorGraph CreateHybridNonlinearFactorGraph()
Definition: testHybridEstimation.cpp:422
EXPECT_DOUBLES_EQUAL
#define EXPECT_DOUBLES_EQUAL(expected, actual, threshold)
Definition: Test.h:161
ordering
static enum @1096 ordering
TestResult
Definition: TestResult.h:26
JacobianFactor.h
gtsam::FactorGraph::size
size_t size() const
Definition: FactorGraph.h:297
gtsam::DecisionTree
a decision tree is a function from assignments to values.
Definition: DecisionTree.h:62
gtsam::HybridSmoother
Definition: HybridSmoother.h:27
HybridNonlinearFactor.h
A set of nonlinear factors indexed by a set of discrete keys.
gtsam::HybridBayesNet::push_back
void push_back(std::shared_ptr< HybridConditional > conditional)
Add a hybrid conditional using a shared_ptr.
Definition: HybridBayesNet.h:76
gtsam::DiscreteConditional
Definition: DiscreteConditional.h:37
gtsam::Switching::modeChain
HybridNonlinearFactorGraph modeChain
Definition: Switching.h:128
gtsam
traits
Definition: SFMdata.h:40
error
static double error
Definition: testRot3.cpp:37
NoiseModel.h
K
#define K
Definition: igam.h:8
estimation_fixture::measurements
std::vector< double > measurements
Definition: testHybridEstimation.cpp:51
NonlinearFactorGraph.h
Factor Graph consisting of non-linear factors.
gtsam::DiscreteValues
Definition: DiscreteValues.h:34
gtsam::FactorGraph::push_back
IsDerived< DERIVEDFACTOR > push_back(std::shared_ptr< DERIVEDFACTOR > factor)
Add a factor directly using a shared_ptr.
Definition: FactorGraph.h:147
gtsam::Values
Definition: Values.h:65
gtsam::DiscreteKey
std::pair< Key, size_t > DiscreteKey
Definition: DiscreteKey.h:38
std
Definition: BFloat16.h:88
gtsam::Switching::linearizationPoint
Values linearizationPoint
Definition: Switching.h:130
gtsam::HybridValues::discrete
const DiscreteValues & discrete() const
Return the discrete values.
Definition: HybridValues.cpp:57
gtsam::Values::insert
void insert(Key j, const Value &val)
Definition: Values.cpp:155
gtsam::HybridNonlinearFactorGraph::linearize
std::shared_ptr< HybridGaussianFactorGraph > linearize(const Values &continuousValues) const
Linearize all the continuous factors in the HybridNonlinearFactorGraph.
Definition: HybridNonlinearFactorGraph.cpp:139
GaussianBayesTree.h
Gaussian Bayes Tree, the result of eliminating a GaussianJunctionTree.
gtsam::assert_equal
bool assert_equal(const Matrix &expected, const Matrix &actual, double tol)
Definition: Matrix.cpp:40
HybridNonlinearISAM.h
initial
Definition: testScenarioRunner.cpp:148
gtsam::ISAM
Definition: ISAM.h:31
reverse
void reverse(const MatrixType &m)
Definition: array_reverse.cpp:16
test_motion::components
std::vector< NoiseModelFactor::shared_ptr > components
Definition: testHybridNonlinearFactorGraph.cpp:120
gtsam::HybridBayesNet::shared_ptr
std::shared_ptr< HybridBayesNet > shared_ptr
Definition: HybridBayesNet.h:42
gtsam::Switching::binaryFactors
HybridNonlinearFactorGraph binaryFactors
Definition: Switching.h:128
gtsam::HybridBayesNet::toFactorGraph
HybridGaussianFactorGraph toFactorGraph(const VectorValues &measurements) const
Definition: HybridBayesNet.cpp:239
gtsam::FactorGraph::add
IsDerived< DERIVEDFACTOR > add(std::shared_ptr< DERIVEDFACTOR > factor)
add is a synonym for push_back.
Definition: FactorGraph.h:171
graph
NonlinearFactorGraph graph
Definition: doc/Code/OdometryExample.cpp:2
model1
noiseModel::Isotropic::shared_ptr model1
Definition: testEssentialMatrixFactor.cpp:26
gtsam::Key
std::uint64_t Key
Integer nonlinear key type.
Definition: types.h:97
Z
#define Z
Definition: icosphere.cpp:21
gtsam::Ordering
Definition: inference/Ordering.h:33
gtsam::Switching::unaryFactors
HybridNonlinearFactorGraph unaryFactors
Definition: Switching.h:128
i
int i
Definition: BiCGSTAB_step_by_step.cpp:9
gtsam::HybridNonlinearFactor
Implementation of a discrete-conditioned hybrid factor.
Definition: HybridNonlinearFactor.h:58
Pose3.h
3D Pose
gtsam::FactorGraph::emplace_shared
IsDerived< DERIVEDFACTOR > emplace_shared(Args &&... args)
Emplace a shared pointer to factor of given type.
Definition: FactorGraph.h:153
gtsam::HybridBayesNet::emplace_shared
void emplace_shared(Args &&...args)
Definition: HybridBayesNet.h:116
gtsam::BetweenFactor
Definition: BetweenFactor.h:40
M
Matrix< RealScalar, Dynamic, Dynamic > M
Definition: bench_gemm.cpp:51


gtsam
Author(s):
autogenerated on Sat Nov 16 2024 04:07:38