testHybridGaussianFactorGraph.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/*
 * @file testHybridGaussianFactorGraph.cpp
 * @date Mar 11, 2022
 * @author Fan Jiang
 * @author Varun Agrawal
 * @author Frank Dellaert
 */

#include <CppUnitLite/Test.h>
#include <CppUnitLite/TestHarness.h>
#include <gtsam/base/Testable.h>
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/base/Vector.h>
#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridGaussianProductFactor.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/BayesNet.h>
#include <gtsam/inference/Key.h>
#include <gtsam/inference/Ordering.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>

#include <cstddef>
#include <memory>
#include <vector>

#include "Switching.h"
#include "TinyHybridExample.h"

using namespace std;
using namespace gtsam;

using gtsam::symbol_shorthand::M;
using gtsam::symbol_shorthand::N;
using gtsam::symbol_shorthand::X;
using gtsam::symbol_shorthand::Z;

// Set up sampling
std::mt19937_64 kRng(42);

static const DiscreteKey m0(M(0), 2), m1(M(1), 2), m2(M(2), 2);

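// Key conventions used throughout this file: X(i) are continuous states,
// Z(i) are measurements, and M(i) / N(i) are discrete mode keys. The binary
// DiscreteKeys m0, m1, m2 above are shared by most of the tests below.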
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, Creation) {
  HybridConditional conditional;

  HybridGaussianFactorGraph hfg;

  hfg.emplace_shared<JacobianFactor>(X(0), I_3x3, Z_3x1);

  // Define a hybrid gaussian conditional P(x0|x1, c0)
  // and add it to the factor graph.
  HybridGaussianConditional gm(
      m0,
      {std::make_shared<GaussianConditional>(X(0), Z_3x1, I_3x3, X(1), I_3x3),
       std::make_shared<GaussianConditional>(X(0), Vector3::Ones(), I_3x3, X(1),
                                             I_3x3)});
  hfg.add(gm);

  EXPECT_LONGS_EQUAL(2, hfg.size());
}

/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, EliminateSequential) {
  // Test elimination of a single variable.
  HybridGaussianFactorGraph hfg;

  hfg.emplace_shared<JacobianFactor>(0, I_3x3, Z_3x1);

  auto result = hfg.eliminate({0});

  EXPECT_LONGS_EQUAL(result.first->size(), 1);
}

/* ************************************************************************* */

namespace two {
std::vector<GaussianFactor::shared_ptr> components(Key key) {
  return {std::make_shared<JacobianFactor>(key, I_3x3, Z_3x1),
          std::make_shared<JacobianFactor>(key, I_3x3, Vector3::Ones())};
}
}  // namespace two

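// The two components above share the key and Jacobian but differ in their
// right-hand side (zero vs. ones); the tests below pair them with a binary
// mode key inside a HybridGaussianFactor, so the selected Gaussian depends on
// the discrete mode.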
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, hybridEliminationOneFactor) {
  HybridGaussianFactorGraph hfg;
  hfg.emplace_shared<HybridGaussianFactor>(m1, two::components(X(1)));

  auto result = hfg.eliminate({X(1)});

  // Check that we have a valid Gaussian conditional.
  auto hgc = result.first->asHybrid();
  CHECK(hgc);
  const HybridValues values{{{X(1), Z_3x1}}, {{M(1), 1}}};
  EXPECT(HybridConditional::CheckInvariants(*result.first, values));

  // Check that factor is discrete and correct
  auto factor = std::dynamic_pointer_cast<DecisionTreeFactor>(result.second);
  CHECK(factor);
  // regression test
  EXPECT(assert_equal(DecisionTreeFactor{m1, "1 1"}, *factor, 1e-5));
}

/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
  HybridGaussianFactorGraph hfg;

  // Add prior on x0
  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));

  // Add factor between x0 and x1
  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

  // Add a hybrid gaussian factor ϕ(x1, c1)
  hfg.add(HybridGaussianFactor(m1, two::components(X(1))));

  auto result = hfg.eliminateSequential();

  auto dc = result->at(2)->asDiscrete();
  CHECK(dc);
  DiscreteValues dv;
  dv[M(1)] = 0;
  // Regression test
  EXPECT_DOUBLES_EQUAL(0.62245933120185448, dc->operator()(dv), 1e-3);
}

/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
  HybridGaussianFactorGraph hfg;

  // Add prior on x0
  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
  // Add factor between x0 and x1
  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

  hfg.add(HybridGaussianFactor(m1, two::components(X(1))));

  // Discrete probability table for c1
  hfg.add(DecisionTreeFactor(m1, {2, 8}));
  // Joint discrete probability table for c1, c2
  hfg.add(DecisionTreeFactor({m1, m2}, "1 2 3 4"));

  HybridBayesNet::shared_ptr result = hfg.eliminateSequential();

  // There are 4 variables (2 continuous + 2 discrete) in the bayes net.
  EXPECT_LONGS_EQUAL(4, result->size());
}

/* ************************************************************************* */
// Test API for the smallest switching network.
// None of these are regression tests.
TEST(HybridGaussianFactorGraph, Switching) {
  // Create switching network with two continuous variables and one discrete:
  // ϕ(x0) ϕ(x0,x1,m0) ϕ(x1;z1) ϕ(m0)
  const double betweenSigma = 0.3, priorSigma = 0.1;
  Switching s(2, betweenSigma, priorSigma);

  // Check size of linearized factor graph
  const HybridGaussianFactorGraph &graph = s.linearizedFactorGraph();
  EXPECT_LONGS_EQUAL(4, graph.size());

  // Create some continuous and discrete values
  const VectorValues continuousValues{{X(0), Vector1(0.1)},
                                      {X(1), Vector1(1.2)}};
  const DiscreteValues modeZero{{M(0), 0}}, modeOne{{M(0), 1}};

  // Get the hybrid gaussian factor and check it is as expected
  auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(graph.at(2));
  CHECK(hgf);

  // Get factors and scalars for both modes
  auto [factor0, scalar0] = (*hgf)(modeZero);
  auto [factor1, scalar1] = (*hgf)(modeOne);
  CHECK(factor0);
  CHECK(factor1);

  // Check scalars against negLogConstant of noise model
  auto betweenModel = noiseModel::Isotropic::Sigma(1, betweenSigma);
  EXPECT_DOUBLES_EQUAL(betweenModel->negLogConstant(), scalar0, 1e-9);
  EXPECT_DOUBLES_EQUAL(betweenModel->negLogConstant(), scalar1, 1e-9);

  // Check error for M(0) = 0
  const HybridValues values0{continuousValues, modeZero};
  double expectedError0 = 0;
  for (const auto &factor : graph) expectedError0 += factor->error(values0);
  EXPECT_DOUBLES_EQUAL(expectedError0, graph.error(values0), 1e-5);

  // Check error for M(0) = 1
  const HybridValues values1{continuousValues, modeOne};
  double expectedError1 = 0;
  for (const auto &factor : graph) expectedError1 += factor->error(values1);
  EXPECT_DOUBLES_EQUAL(expectedError1, graph.error(values1), 1e-5);

  // Check errorTree
  AlgebraicDecisionTree<Key> actualErrors = graph.errorTree(continuousValues);
  // Create expected error tree
  const AlgebraicDecisionTree<Key> expectedErrors(M(0), expectedError0,
                                                  expectedError1);

  // Check that the actual error tree matches the expected one
  EXPECT(assert_equal(expectedErrors, actualErrors, 1e-5));

  // Check probPrime
  const double probPrime0 = graph.probPrime(values0);
  EXPECT_DOUBLES_EQUAL(std::exp(-expectedError0), probPrime0, 1e-5);

  const double probPrime1 = graph.probPrime(values1);
  EXPECT_DOUBLES_EQUAL(std::exp(-expectedError1), probPrime1, 1e-5);

  // Check discretePosterior
  const AlgebraicDecisionTree<Key> graphPosterior =
      graph.discretePosterior(continuousValues);
  const double sum = probPrime0 + probPrime1;
  const AlgebraicDecisionTree<Key> expectedPosterior(M(0), probPrime0 / sum,
                                                     probPrime1 / sum);
  EXPECT(assert_equal(expectedPosterior, graphPosterior, 1e-5));

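  // Note: the checks above spell out the identities used throughout this file:
  // probPrime(v) == exp(-error(v)), and the discrete posterior is simply the
  // mode-wise normalization P(m | x) = probPrime(x, m) / sum_m probPrime(x, m).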
  // Make the clique of factors connected to x0:
  HybridGaussianFactorGraph factors_x0;
  factors_x0.push_back(graph.at(0));
  factors_x0.push_back(hgf);

  // Test collectProductFactor
  auto productFactor = factors_x0.collectProductFactor();

  // For M(0) = 0
  auto [gaussianFactor0, actualScalar0] = productFactor(modeZero);
  EXPECT(gaussianFactor0.size() == 2);
  EXPECT_DOUBLES_EQUAL((*hgf)(modeZero).second, actualScalar0, 1e-5);

  // For M(0) = 1
  auto [gaussianFactor1, actualScalar1] = productFactor(modeOne);
  EXPECT(gaussianFactor1.size() == 2);
  EXPECT_DOUBLES_EQUAL((*hgf)(modeOne).second, actualScalar1, 1e-5);

  // Test eliminate x0
  const Ordering ordering{X(0)};
  auto [conditional, factor] = factors_x0.eliminate(ordering);

  // Check the conditional
  CHECK(conditional);
  EXPECT(conditional->isHybrid());
  auto p_x0_given_x1_m = conditional->asHybrid();
  CHECK(p_x0_given_x1_m);
  EXPECT(HybridGaussianConditional::CheckInvariants(*p_x0_given_x1_m, values1));
  EXPECT_LONGS_EQUAL(1, p_x0_given_x1_m->nrFrontals());  // x0
  EXPECT_LONGS_EQUAL(2, p_x0_given_x1_m->nrParents());   // x1, m0

  // Check the remaining factor
  EXPECT(factor);
  EXPECT(std::dynamic_pointer_cast<HybridGaussianFactor>(factor));
  auto phi_x1_m = std::dynamic_pointer_cast<HybridGaussianFactor>(factor);
  EXPECT_LONGS_EQUAL(2, phi_x1_m->keys().size());  // x1, m0
  // Check that the scalars incorporate the negative log constant of the
  // conditional
  EXPECT_DOUBLES_EQUAL(scalar0 - (*p_x0_given_x1_m)(modeZero)->negLogConstant(),
                       (*phi_x1_m)(modeZero).second, 1e-9);
  EXPECT_DOUBLES_EQUAL(scalar1 - (*p_x0_given_x1_m)(modeOne)->negLogConstant(),
                       (*phi_x1_m)(modeOne).second, 1e-9);

  // Check that the conditional and remaining factor are consistent for both
  // modes
  for (auto &&mode : {modeZero, modeOne}) {
    const auto gc = (*p_x0_given_x1_m)(mode);
    const auto [gf, scalar] = (*phi_x1_m)(mode);

    // The error of the original factors should equal the sum of errors of the
    // conditional and remaining factor, modulo the normalization constant of
    // the conditional.
    double originalError = factors_x0.error({continuousValues, mode});
    const double actualError = gc->negLogConstant() +
                               gc->error(continuousValues) +
                               gf->error(continuousValues) + scalar;
    EXPECT_DOUBLES_EQUAL(originalError, actualError, 1e-9);
  }

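  // In other words, the loop above verifies, for each mode m,
  //   error_graph(x, m) == negLogConstant(m) + error_conditional(x; m)
  //                        + error_factor(x; m) + scalar(m),
  // i.e. elimination preserves the total (unnormalized) negative log density.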
  // Create a clique for x1
  HybridGaussianFactorGraph factors_x1;
  factors_x1.push_back(
      factor);  // Use the remaining factor from previous elimination
  factors_x1.push_back(
      graph.at(1));  // Add the factor for x1 from the original graph

  // Test collectProductFactor for x1 clique
  auto productFactor_x1 = factors_x1.collectProductFactor();

  // For M(0) = 0
  auto [gaussianFactor_x1_0, actualScalar_x1_0] = productFactor_x1(modeZero);
  EXPECT_LONGS_EQUAL(2, gaussianFactor_x1_0.size());
  // NOTE(Frank): prior on x1 does not contribute to the scalar
  EXPECT_DOUBLES_EQUAL((*phi_x1_m)(modeZero).second, actualScalar_x1_0, 1e-5);

  // For M(0) = 1
  auto [gaussianFactor_x1_1, actualScalar_x1_1] = productFactor_x1(modeOne);
  EXPECT_LONGS_EQUAL(2, gaussianFactor_x1_1.size());
  // NOTE(Frank): prior on x1 does not contribute to the scalar
  EXPECT_DOUBLES_EQUAL((*phi_x1_m)(modeOne).second, actualScalar_x1_1, 1e-5);

  // Test eliminate for x1 clique
  Ordering ordering_x1{X(1)};
  auto [conditional_x1, factor_x1] = factors_x1.eliminate(ordering_x1);

  // Check the conditional for x1
  CHECK(conditional_x1);
  EXPECT(conditional_x1->isHybrid());
  auto p_x1_given_m = conditional_x1->asHybrid();
  CHECK(p_x1_given_m);
  EXPECT_LONGS_EQUAL(1, p_x1_given_m->nrFrontals());  // x1
  EXPECT_LONGS_EQUAL(1, p_x1_given_m->nrParents());   // m0

  // Check the remaining factor for x1
  CHECK(factor_x1);
  auto phi_x1 = std::dynamic_pointer_cast<DecisionTreeFactor>(factor_x1);
  CHECK(phi_x1);
  EXPECT_LONGS_EQUAL(1, phi_x1->keys().size());  // m0
  // We can't really check the error of the decision tree factor phi_x1, because
  // the continuous factor whose error(kEmpty) we need is not available.

  // Now test full elimination of the graph:
  auto hybridBayesNet = graph.eliminateSequential();
  CHECK(hybridBayesNet);

  // Check that the posterior P(M|X=continuousValues) from the Bayes net is the
  // same as the posterior computed directly from the graph. This is a sanity
  // check that the elimination is done correctly.
  AlgebraicDecisionTree<Key> bnPosterior =
      hybridBayesNet->discretePosterior(continuousValues);
  EXPECT(assert_equal(graphPosterior, bnPosterior));
}

/* ****************************************************************************/
// Test subset of API for switching network with 3 states.
// None of these are regression tests.
TEST(HybridGaussianFactorGraph, ErrorAndProbPrime) {
  // Create switching network with three continuous variables and two discrete:
  // ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1)
  Switching s(3);

  // Check size of linearized factor graph
  const HybridGaussianFactorGraph &graph = s.linearizedFactorGraph();
  EXPECT_LONGS_EQUAL(7, graph.size());

  // Eliminate the graph
  const HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential();

  const HybridValues delta = hybridBayesNet->optimize();
  const double error = graph.error(delta);

  // Check that the probability prime is the exponential of the negative error
  EXPECT_DOUBLES_EQUAL(graph.probPrime(delta), exp(-error), 1e-7);

  // Check that the posterior P(M|X=continuousValues) from the Bayes net is the
  // same as the posterior computed directly from the graph. This is a sanity
  // check that the elimination is done correctly.
  const AlgebraicDecisionTree<Key> graphPosterior =
      graph.discretePosterior(delta.continuous());
  const AlgebraicDecisionTree<Key> bnPosterior =
      hybridBayesNet->discretePosterior(delta.continuous());
  EXPECT(assert_equal(graphPosterior, bnPosterior));
}

/* ************************************************************************* */
// Select a particular continuous factor graph given a discrete assignment
TEST(HybridGaussianFactorGraph, DiscreteSelection) {
  Switching s(3);

  HybridGaussianFactorGraph graph = s.linearizedFactorGraph();

  DiscreteValues dv00{{M(0), 0}, {M(1), 0}};
  GaussianFactorGraph continuous_00 = graph(dv00);
  GaussianFactorGraph expected_00;
  expected_00.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10)));
  expected_00.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10)));
  expected_00.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10)));
  expected_00.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-1)));
  expected_00.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-1)));

  EXPECT(assert_equal(expected_00, continuous_00));

  DiscreteValues dv01{{M(0), 0}, {M(1), 1}};
  GaussianFactorGraph continuous_01 = graph(dv01);
  GaussianFactorGraph expected_01;
  expected_01.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10)));
  expected_01.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10)));
  expected_01.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10)));
  expected_01.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-1)));
  expected_01.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-0)));

  EXPECT(assert_equal(expected_01, continuous_01));

  DiscreteValues dv10{{M(0), 1}, {M(1), 0}};
  GaussianFactorGraph continuous_10 = graph(dv10);
  GaussianFactorGraph expected_10;
  expected_10.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10)));
  expected_10.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10)));
  expected_10.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10)));
  expected_10.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-0)));
  expected_10.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-1)));

  EXPECT(assert_equal(expected_10, continuous_10));

  DiscreteValues dv11{{M(0), 1}, {M(1), 1}};
  GaussianFactorGraph continuous_11 = graph(dv11);
  GaussianFactorGraph expected_11;
  expected_11.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10)));
  expected_11.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10)));
  expected_11.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10)));
  expected_11.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-0)));
  expected_11.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-0)));

  EXPECT(assert_equal(expected_11, continuous_11));
}

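// Indexing the hybrid graph with a DiscreteValues assignment, as in
// graph(dv00) above, selects the Gaussian component of every hybrid factor
// for that assignment and returns a plain GaussianFactorGraph; only the
// between-factor right-hand sides differ across the four assignments.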
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, optimize) {
  HybridGaussianFactorGraph hfg;

  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
  hfg.add(HybridGaussianFactor(m1, two::components(X(1))));

  auto result = hfg.eliminateSequential();

  HybridValues hv = result->optimize();

  EXPECT(assert_equal(hv.atDiscrete(M(1)), int(0)));
}

/* ************************************************************************* */
// Test adding of gaussian conditional and re-elimination.
TEST(HybridGaussianFactorGraph, Conditionals) {
  Switching switching(4);

  HybridGaussianFactorGraph hfg;
  hfg.push_back(switching.linearUnaryFactors.at(0));  // P(X0)
  Ordering ordering;
  ordering.push_back(X(0));
  HybridBayesNet::shared_ptr bayes_net = hfg.eliminateSequential(ordering);

  HybridGaussianFactorGraph hfg2;
  hfg2.push_back(*bayes_net);                           // P(X0)
  hfg2.push_back(switching.linearBinaryFactors.at(0));  // P(X0, X1 | M0)
  hfg2.push_back(switching.linearBinaryFactors.at(1));  // P(X1, X2 | M1)
  hfg2.push_back(switching.linearUnaryFactors.at(2));   // P(X2)
  ordering += X(1), X(2), M(0), M(1);

  // Create product of first two factors and check eliminate:
  HybridGaussianFactorGraph fragment;
  fragment.push_back(hfg2[0]);
  fragment.push_back(hfg2[1]);

  // Check the product
  auto product = fragment.collectProductFactor();
  auto leaf = fragment(DiscreteValues{{M(0), 0}});
  EXPECT_LONGS_EQUAL(2, leaf.size());

  // Check product and that removeEmpty does not touch it
  auto pruned = product.removeEmpty();
  LONGS_EQUAL(2, pruned.nrLeaves());

  // Test eliminate
  auto [hybridConditional, factor] = fragment.eliminate({X(0)});
  EXPECT(hybridConditional->isHybrid());
  EXPECT(hybridConditional->keys() == KeyVector({X(0), X(1), M(0)}));

  EXPECT(dynamic_pointer_cast<HybridGaussianFactor>(factor));
  EXPECT(factor->keys() == KeyVector({X(1), M(0)}));

  bayes_net = hfg2.eliminateSequential(ordering);

  HybridValues result = bayes_net->optimize();

  Values expected_continuous;
  expected_continuous.insert<double>(X(0), 0);
  expected_continuous.insert<double>(X(1), 1);
  expected_continuous.insert<double>(X(2), 2);
  expected_continuous.insert<double>(X(3), 4);
  Values result_continuous =
      switching.linearizationPoint.retract(result.continuous());
  EXPECT(assert_equal(expected_continuous, result_continuous));

  DiscreteValues expected_discrete;
  expected_discrete[M(0)] = 1;
  expected_discrete[M(1)] = 1;
  EXPECT(assert_equal(expected_discrete, result.discrete()));
}

/* ****************************************************************************/
// Test hybrid gaussian factor graph errorTree during incremental operation
TEST(HybridGaussianFactorGraph, IncrementalErrorTree) {
  Switching s(4);

  HybridGaussianFactorGraph graph;
  graph.push_back(s.linearUnaryFactors.at(0));   // f(X0)
  graph.push_back(s.linearBinaryFactors.at(0));  // f(X0, X1, M0)
  graph.push_back(s.linearBinaryFactors.at(1));  // f(X1, X2, M1)
  graph.push_back(s.linearUnaryFactors.at(1));   // f(X1)
  graph.push_back(s.linearUnaryFactors.at(2));   // f(X2)
  graph.push_back(s.modeChain.at(0));            // f(M0)
  graph.push_back(s.modeChain.at(1));            // f(M0, M1)

  HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential();
  EXPECT_LONGS_EQUAL(5, hybridBayesNet->size());

  // Check discrete posterior at optimum
  HybridValues delta = hybridBayesNet->optimize();
  AlgebraicDecisionTree<Key> graphPosterior =
      graph.discretePosterior(delta.continuous());
  AlgebraicDecisionTree<Key> bnPosterior =
      hybridBayesNet->discretePosterior(delta.continuous());
  EXPECT(assert_equal(graphPosterior, bnPosterior));

  graph = HybridGaussianFactorGraph();
  graph.push_back(*hybridBayesNet);
  graph.push_back(s.linearBinaryFactors.at(2));  // f(X2, X3, M2)
  graph.push_back(s.linearUnaryFactors.at(3));   // f(X3)

  hybridBayesNet = graph.eliminateSequential();
  EXPECT_LONGS_EQUAL(7, hybridBayesNet->size());

  delta = hybridBayesNet->optimize();
  graphPosterior = graph.discretePosterior(delta.continuous());
  bnPosterior = hybridBayesNet->discretePosterior(delta.continuous());
  EXPECT(assert_equal(graphPosterior, bnPosterior));
}

/* ****************************************************************************/
// Check that collectProductFactor works correctly.
TEST(HybridGaussianFactorGraph, collectProductFactor) {
  const int num_measurements = 1;
  VectorValues vv{{Z(0), Vector1(5.0)}};
  auto fg = tiny::createHybridGaussianFactorGraph(num_measurements, vv);
  EXPECT_LONGS_EQUAL(3, fg.size());

  // Assemble graph tree:
  auto actual = fg.collectProductFactor();

  // Create expected decision tree with two factor graphs:

  // Get hybrid factor:
  auto hybrid = fg.at<HybridGaussianFactor>(0);
  CHECK(hybrid);

  // Get prior factor:
  const auto gf = fg.at<HybridConditional>(1);
  CHECK(gf);
  using GF = GaussianFactor::shared_ptr;
  const GF prior = gf->asGaussian();
  CHECK(prior);

  // Create DiscreteValues for both 0 and 1:
  DiscreteValues d0{{M(0), 0}}, d1{{M(0), 1}};

  // Expected decision tree with two factor graphs:
  // f(x0;mode=0)P(x0)
  GaussianFactorGraph expectedFG0{(*hybrid)(d0).first, prior};
  EXPECT(assert_equal(expectedFG0, actual(d0).first, 1e-5));
  EXPECT(assert_equal(0.0, actual(d0).second, 1e-5));

  // f(x0;mode=1)P(x0)
  GaussianFactorGraph expectedFG1{(*hybrid)(d1).first, prior};
  EXPECT(assert_equal(expectedFG1, actual(d1).first, 1e-5));
  EXPECT(assert_equal(1.79176, actual(d1).second, 1e-5));
}

/* ****************************************************************************/
// Check that the factor graph unnormalized probability is proportional to the
// Bayes net probability for the given measurements.
bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements,
               const HybridGaussianFactorGraph &fg, size_t num_samples = 100) {
  auto compute_ratio = [&](HybridValues *sample) -> double {
    sample->update(measurements);  // update sample with given measurements:
    return bn.evaluate(*sample) / fg.probPrime(*sample);
  };

  HybridValues sample = bn.sample(&kRng);
  double expected_ratio = compute_ratio(&sample);

  // Test ratios for a number of independent samples:
  for (size_t i = 0; i < num_samples; i++) {
    HybridValues sample = bn.sample(&kRng);
    if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) return false;
  }
  return true;
}

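// If fg is indeed an unnormalized version of the density encoded by bn, then
// bn.evaluate(v) / fg.probPrime(v) is the same constant for every v, so it
// suffices to compare each sample's ratio against the first sample's ratio.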
/* ****************************************************************************/
// Check that the posterior Bayes net unnormalized probability is proportional
// to the prior Bayes net probability for the given measurements.
bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements,
               const HybridBayesNet &posterior, size_t num_samples = 100) {
  auto compute_ratio = [&](HybridValues *sample) -> double {
    sample->update(measurements);  // update sample with given measurements:
    return bn.evaluate(*sample) / posterior.evaluate(*sample);
  };

  HybridValues sample = bn.sample(&kRng);
  double expected_ratio = compute_ratio(&sample);

  // Test ratios for a number of independent samples:
  for (size_t i = 0; i < num_samples; i++) {
    HybridValues sample = bn.sample(&kRng);
    // std::cout << "ratio: " << compute_ratio(&sample) << std::endl;
    if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) return false;
  }
  return true;
}

/* ****************************************************************************/
// Check that eliminating tiny net with 1 measurement yields correct result.
TEST(HybridGaussianFactorGraph, EliminateTiny1) {
  const int num_measurements = 1;
  const VectorValues measurements{{Z(0), Vector1(5.0)}};
  auto bn = tiny::createHybridBayesNet(num_measurements);
  auto fg = bn.toFactorGraph(measurements);
  EXPECT_LONGS_EQUAL(3, fg.size());

  EXPECT(ratioTest(bn, measurements, fg));

  // Create expected Bayes Net:
  HybridBayesNet expectedBayesNet;

  // Create hybrid Gaussian conditional on X(0).
  using tiny::mode;
  // regression, but mean checked to be 5.0 in both cases:
  const auto conditional0 = std::make_shared<GaussianConditional>(
                 X(0), Vector1(14.1421), I_1x1 * 2.82843),
             conditional1 = std::make_shared<GaussianConditional>(
                 X(0), Vector1(10.1379), I_1x1 * 2.02759);
  expectedBayesNet.emplace_shared<HybridGaussianConditional>(
      mode, std::vector{conditional0, conditional1});

  // Add prior on mode.
  expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "74/26");

  // Test elimination
  const auto posterior = fg.eliminateSequential();
  EXPECT(assert_equal(expectedBayesNet, *posterior, 0.01));

  EXPECT(ratioTest(bn, measurements, *posterior));
}

/* ****************************************************************************/
// Check that eliminating tiny net with 1 measurement with mode order swapped
// yields correct result.
TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
  const VectorValues measurements{{Z(0), Vector1(5.0)}};

  HybridBayesNet bn;

  // mode-dependent: 1 is low-noise, 0 is high-noise.
  // Create hybrid Gaussian conditional z_0 = x0 + noise for each measurement.
  std::vector<std::pair<Vector, double>> parms{{Z_1x1, 3}, {Z_1x1, 0.5}};
  bn.emplace_shared<HybridGaussianConditional>(m1, Z(0), I_1x1, X(0), parms);

  // Create prior on X(0).
  bn.push_back(
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5));

  // Add prior on m1.
  bn.emplace_shared<DiscreteConditional>(m1, "1/1");

  // bn.print();
  auto fg = bn.toFactorGraph(measurements);
  EXPECT_LONGS_EQUAL(3, fg.size());

  // fg.print();

  EXPECT(ratioTest(bn, measurements, fg));

  // Create expected Bayes Net:
  HybridBayesNet expectedBayesNet;

  // Create hybrid Gaussian conditional on X(0).
  // regression, but mean checked to be 5.0 in both cases:
  const auto conditional0 = std::make_shared<GaussianConditional>(
                 X(0), Vector1(10.1379), I_1x1 * 2.02759),
             conditional1 = std::make_shared<GaussianConditional>(
                 X(0), Vector1(14.1421), I_1x1 * 2.82843);
  expectedBayesNet.emplace_shared<HybridGaussianConditional>(
      m1, std::vector{conditional0, conditional1});

  // Add prior on m1.
  expectedBayesNet.emplace_shared<DiscreteConditional>(m1, "1/1");

  // Test elimination
  const auto posterior = fg.eliminateSequential();
  // EXPECT(assert_equal(expectedBayesNet, *posterior, 0.01));

  EXPECT(ratioTest(bn, measurements, *posterior));

  // posterior->print();
  // posterior->optimize().print();
}

/* ****************************************************************************/
// Check that eliminating tiny net with 2 measurements yields correct result.
TEST(HybridGaussianFactorGraph, EliminateTiny2) {
  // Create factor graph with 2 measurements such that posterior mean = 5.0.
  const int num_measurements = 2;
  const VectorValues measurements{{Z(0), Vector1(4.0)}, {Z(1), Vector1(6.0)}};
  auto bn = tiny::createHybridBayesNet(num_measurements);
  auto fg = bn.toFactorGraph(measurements);
  EXPECT_LONGS_EQUAL(4, fg.size());

  // Create expected Bayes Net:
  HybridBayesNet expectedBayesNet;

  // Create hybrid Gaussian conditional on X(0).
  using tiny::mode;
  // regression, but mean checked to be 5.0 in both cases:
  const auto conditional0 = std::make_shared<GaussianConditional>(
                 X(0), Vector1(17.3205), I_1x1 * 3.4641),
             conditional1 = std::make_shared<GaussianConditional>(
                 X(0), Vector1(10.274), I_1x1 * 2.0548);
  expectedBayesNet.emplace_shared<HybridGaussianConditional>(
      mode, std::vector{conditional0, conditional1});

  // Add prior on mode.
  expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "23/77");

  // Test elimination
  const auto posterior = fg.eliminateSequential();
  EXPECT(assert_equal(expectedBayesNet, *posterior, 0.01));

  EXPECT(ratioTest(bn, measurements, *posterior));
}

/* ****************************************************************************/
// Test eliminating tiny net with 1 mode per measurement.
TEST(HybridGaussianFactorGraph, EliminateTiny22) {
  // Create factor graph with 2 measurements such that posterior mean = 5.0.
  const int num_measurements = 2;
  const bool manyModes = true;

  // Create Bayes net and convert to factor graph.
  auto bn = tiny::createHybridBayesNet(num_measurements, manyModes);
  const VectorValues measurements{{Z(0), Vector1(4.0)}, {Z(1), Vector1(6.0)}};
  auto fg = bn.toFactorGraph(measurements);
  EXPECT_LONGS_EQUAL(5, fg.size());

  EXPECT(ratioTest(bn, measurements, fg));

  // Test elimination
  const auto posterior = fg.eliminateSequential();

  EXPECT(ratioTest(bn, measurements, *posterior));
}

/* ****************************************************************************/
// Test elimination of a switching network with one mode per measurement.
TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
  // Create a switching network with one mode per measurement.
  HybridBayesNet bn;

  // NOTE: we add in reverse topological order so we can sample from the Bayes net:

  // Add measurements:
  std::vector<std::pair<Vector, double>> measurementModels{{Z_1x1, 3},
                                                           {Z_1x1, 0.5}};
  for (size_t t : {0, 1, 2}) {
    // Create hybrid Gaussian conditional on Z(t) conditioned on X(t) and mode N(t):
    const auto noise_mode_t = DiscreteKey{N(t), 2};
    bn.emplace_shared<HybridGaussianConditional>(noise_mode_t, Z(t), I_1x1,
                                                 X(t), measurementModels);

    // Create prior on discrete mode N(t):
    bn.emplace_shared<DiscreteConditional>(noise_mode_t, "20/80");
  }

  // Add motion models. TODO(frank): why are they exactly the same?
  std::vector<std::pair<Vector, double>> motionModels{{Z_1x1, 0.2},
                                                      {Z_1x1, 0.2}};
  for (size_t t : {2, 1}) {
    // Create hybrid Gaussian conditional on X(t) conditioned on X(t-1)
    // and mode M(t-1):
    const auto motion_model_t = DiscreteKey{M(t), 2};
    bn.emplace_shared<HybridGaussianConditional>(motion_model_t, X(t), I_1x1,
                                                 X(t - 1), motionModels);

    // Create prior on motion model M(t):
    bn.emplace_shared<DiscreteConditional>(motion_model_t, "40/60");
  }

  // Create Gaussian prior on continuous X(0) using sharedMeanAndStddev:
  bn.push_back(GaussianConditional::sharedMeanAndStddev(X(0), Z_1x1, 0.1));

  // Make sure we can sample from the Bayes net:
  EXPECT_LONGS_EQUAL(6, bn.sample(&kRng).continuous().size());

  // Create measurements consistent with moving right every time:
  const VectorValues measurements{
      {Z(0), Vector1(0.0)}, {Z(1), Vector1(1.0)}, {Z(2), Vector1(2.0)}};
  const HybridGaussianFactorGraph fg = bn.toFactorGraph(measurements);

  // Factor graph is:
  //      D     D
  //      |     |
  //      m1    m2
  //      |     |
  // C-x0-HC-x1-HC-x2
  //   |     |     |
  //   HF    HF    HF
  //   |     |     |
  //   n0    n1    n2
  //   |     |     |
  //   D     D     D
  EXPECT_LONGS_EQUAL(11, fg.size());
  EXPECT(ratioTest(bn, measurements, fg));

  // Do elimination of X(2) only:
  auto [bn1, fg1] = fg.eliminatePartialSequential(Ordering{X(2)});
  fg1->push_back(*bn1);
  EXPECT(ratioTest(bn, measurements, *fg1));

  // Create ordering that eliminates in time order, then discrete modes:
  Ordering ordering{X(2), X(1), X(0), N(0), N(1), N(2), M(1), M(2)};

  // Do elimination:
  const HybridBayesNet::shared_ptr posterior = fg.eliminateSequential(ordering);

  // Test resulting posterior Bayes net has correct size:
  EXPECT_LONGS_EQUAL(8, posterior->size());

  // Ratio test
  EXPECT(ratioTest(bn, measurements, *posterior));
}

/* ************************************************************************* */
int main() {
  TestResult tr;
  return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */