testHybridGaussianFactorGraph.cpp
1 /* ----------------------------------------------------------------------------
2 
3  * GTSAM Copyright 2010, Georgia Tech Research Corporation,
4  * Atlanta, Georgia 30332-0415
5  * All Rights Reserved
6  * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
7 
8  * See LICENSE for the license information
9 
10  * -------------------------------------------------------------------------- */
11 
12 /*
13  * @file testHybridGaussianFactorGraph.cpp
14  * @date Mar 11, 2022
15  * @author Fan Jiang
16  * @author Varun Agrawal
17  * @author Frank Dellaert
18  */
19 
20 #include <CppUnitLite/Test.h>
21 #include <CppUnitLite/TestHarness.h>
22 #include <gtsam/base/Testable.h>
23 #include <gtsam/base/TestableAssertions.h>
24 #include <gtsam/base/Vector.h>
25 #include <gtsam/discrete/DecisionTreeFactor.h>
26 #include <gtsam/discrete/DiscreteKey.h>
27 #include <gtsam/discrete/DiscreteValues.h>
28 #include <gtsam/discrete/TableDistribution.h>
29 #include <gtsam/hybrid/HybridBayesNet.h>
30 #include <gtsam/hybrid/HybridConditional.h>
31 #include <gtsam/hybrid/HybridFactor.h>
32 #include <gtsam/hybrid/HybridGaussianConditional.h>
33 #include <gtsam/hybrid/HybridGaussianFactor.h>
34 #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
35 #include <gtsam/hybrid/HybridGaussianProductFactor.h>
36 #include <gtsam/hybrid/HybridValues.h>
37 #include <gtsam/inference/BayesNet.h>
38 #include <gtsam/inference/Key.h>
39 #include <gtsam/inference/Ordering.h>
40 #include <gtsam/inference/Symbol.h>
41 #include <gtsam/linear/JacobianFactor.h>
42 
43 #include <cstddef>
44 #include <memory>
45 #include <vector>
46 
47 #include "Switching.h"
48 #include "TinyHybridExample.h"
49 
50 using namespace std;
51 using namespace gtsam;
52 
53 using symbol_shorthand::M;
54 using symbol_shorthand::N;
55 using symbol_shorthand::X;
56 using symbol_shorthand::Z;
57 
58 // Set up sampling
59 std::mt19937_64 kRng(42);
60 
61 static const DiscreteKey m0(M(0), 2), m1(M(1), 2), m2(M(2), 2);
62 
63 /* ************************************************************************* */
64 TEST(HybridGaussianFactorGraph, Creation) {
65  HybridConditional conditional;
66 
67  HybridGaussianFactorGraph hfg;
68 
69  hfg.emplace_shared<JacobianFactor>(X(0), I_3x3, Z_3x1);
70 
71  // Define a hybrid gaussian conditional P(x0|x1, c0)
72  // and add it to the factor graph.
73  HybridGaussianConditional gm(
74  m0,
75  {std::make_shared<GaussianConditional>(X(0), Z_3x1, I_3x3, X(1), I_3x3),
76  std::make_shared<GaussianConditional>(X(0), Vector3::Ones(), I_3x3, X(1),
77  I_3x3)});
78  hfg.add(gm);
79 
80  EXPECT_LONGS_EQUAL(2, hfg.size());
81 }
82 
83 /* ************************************************************************* */
84 TEST(HybridGaussianFactorGraph, EliminateSequential) {
85  // Test elimination of a single variable.
86  HybridGaussianFactorGraph hfg;
87 
88  hfg.emplace_shared<JacobianFactor>(0, I_3x3, Z_3x1);
89 
90  auto result = hfg.eliminate(Ordering{0});
91 
92  EXPECT_LONGS_EQUAL(result.first->size(), 1);
93 }
94 
95 /* ************************************************************************* */
96 
97 namespace two {
98 std::vector<GaussianFactor::shared_ptr> components(Key key) {
99  return {std::make_shared<JacobianFactor>(key, I_3x3, Z_3x1),
100  std::make_shared<JacobianFactor>(key, I_3x3, Vector3::Ones())};
101 }
102 } // namespace two
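// Note: two::components(key) supplies the two mode-conditioned Gaussian
// components used by several tests below: an identity-Jacobian factor with a
// zero right-hand side and one with a right-hand side of all ones, both on the
// same continuous key. Wrapping them in a HybridGaussianFactor together with a
// binary DiscreteKey yields a factor whose continuous part switches with the
// discrete mode.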
103 
104 /* ************************************************************************* */
105 TEST(HybridGaussianFactorGraph, hybridEliminationOneFactor) {
106  HybridGaussianFactorGraph hfg;
107  hfg.add(HybridGaussianFactor(m1, two::components(X(1))));
108 
109  auto result = hfg.eliminate({X(1)});
110 
111  // Check that we have a valid Gaussian conditional.
112  auto hgc = result.first->asHybrid();
113  CHECK(hgc);
114  const HybridValues values{{{X(1), Z_3x1}}, {{M(1), 1}}};
115  EXPECT(HybridConditional::CheckInvariants(*result.first, values));
116 
117  // Check that factor is discrete and correct
118  auto factor = std::dynamic_pointer_cast<TableFactor>(result.second);
119  CHECK(factor);
120  // regression test
121  EXPECT(assert_equal(TableFactor{m1, "1 1"}, *factor, 1e-5));
122 }
123 
124 /* ************************************************************************* */
125 TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
126  HybridGaussianFactorGraph hfg;
127 
128  // Add prior on x0
129  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
130 
131  // Add factor between x0 and x1
132  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
133 
134  // Add a hybrid gaussian factor ϕ(x1, c1)
135  hfg.add(HybridGaussianFactor(m1, two::components(X(1))));
136 
137  auto result = hfg.eliminateSequential();
138 
139  auto dc = result->at(2)->asDiscrete();
140  CHECK(dc);
141  DiscreteValues dv;
142  dv[M(1)] = 0;
143  // Regression test
144  EXPECT_DOUBLES_EQUAL(0.62245933120185448, dc->operator()(dv), 1e-3);
145 }
146 
147 /* ************************************************************************* */
148 TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
149  HybridGaussianFactorGraph hfg;
150 
151  // Add prior on x0
152  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
153  // Add factor between x0 and x1
154  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
155 
156  hfg.add(HybridGaussianFactor(m1, two::components(X(1))));
157 
158  // Discrete probability table for c1
159  hfg.add(DecisionTreeFactor(m1, {2, 8}));
160  // Joint discrete probability table for c1, c2
161  hfg.add(DecisionTreeFactor({m1, m2}, "1 2 3 4"));
162 
163  HybridBayesNet::shared_ptr result = hfg.eliminateSequential();
164 
165  // There are 4 variables (2 continuous + 2 discrete) in the bayes net.
166  EXPECT_LONGS_EQUAL(4, result->size());
167 }
168 
169 /* ************************************************************************* */
170 // Test API for the smallest switching network.
171 // None of these are regression tests.
172 TEST(HybridGaussianFactorGraph, Switching) {
173  // Create switching network with two continuous variables and one discrete:
174  // ϕ(x0) ϕ(x0,x1,m0) ϕ(x1;z1) ϕ(m0)
175  const double betweenSigma = 0.3, priorSigma = 0.1;
176  Switching s(2, betweenSigma, priorSigma);
177 
178  // Check size of linearized factor graph
179  const HybridGaussianFactorGraph &graph = s.linearizedFactorGraph();
180  EXPECT_LONGS_EQUAL(4, graph.size());
181 
182  // Create some continuous and discrete values
183  const VectorValues continuousValues{{X(0), Vector1(0.1)},
184  {X(1), Vector1(1.2)}};
185  const DiscreteValues modeZero{{M(0), 0}}, modeOne{{M(0), 1}};
186 
187  // Get the hybrid gaussian factor and check it is as expected
188  auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(graph.at(2));
189  CHECK(hgf);
190 
191  // Get factors and scalars for both modes
192  auto [factor0, scalar0] = (*hgf)(modeZero);
193  auto [factor1, scalar1] = (*hgf)(modeOne);
194  CHECK(factor0);
195  CHECK(factor1);
196 
197  // Check scalars against negLogConstant of noise model
198  auto betweenModel = noiseModel::Isotropic::Sigma(1, betweenSigma);
199  EXPECT_DOUBLES_EQUAL(betweenModel->negLogConstant(), scalar0, 1e-9);
200  EXPECT_DOUBLES_EQUAL(betweenModel->negLogConstant(), scalar1, 1e-9);
201 
202  // Check error for M(0) = 0
203  const HybridValues values0{continuousValues, modeZero};
204  double expectedError0 = 0;
205  for (const auto &factor : graph) expectedError0 += factor->error(values0);
206  EXPECT_DOUBLES_EQUAL(expectedError0, graph.error(values0), 1e-5);
207 
208  // Check error for M(0) = 1
209  const HybridValues values1{continuousValues, modeOne};
210  double expectedError1 = 0;
211  for (const auto &factor : graph) expectedError1 += factor->error(values1);
212  EXPECT_DOUBLES_EQUAL(expectedError1, graph.error(values1), 1e-5);
213 
214  // Check errorTree
215  AlgebraicDecisionTree<Key> actualErrors = graph.errorTree(continuousValues);
216  // Create expected error tree
217  const AlgebraicDecisionTree<Key> expectedErrors(M(0), expectedError0,
218  expectedError1);
219 
220  // Check that the actual error tree matches the expected one
221  EXPECT(assert_equal(expectedErrors, actualErrors, 1e-5));
222 
223  // Check probPrime
224  const double probPrime0 = graph.probPrime(values0);
225  EXPECT_DOUBLES_EQUAL(std::exp(-expectedError0), probPrime0, 1e-5);
226 
227  const double probPrime1 = graph.probPrime(values1);
228  EXPECT_DOUBLES_EQUAL(std::exp(-expectedError1), probPrime1, 1e-5);
229 
230  // Check discretePosterior
231  const AlgebraicDecisionTree<Key> graphPosterior =
232  graph.discretePosterior(continuousValues);
233  const double sum = probPrime0 + probPrime1;
234  const AlgebraicDecisionTree<Key> expectedPosterior(M(0), probPrime0 / sum,
235  probPrime1 / sum);
236  EXPECT(assert_equal(expectedPosterior, graphPosterior, 1e-5));
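  // (discretePosterior normalizes exp(-error) over the discrete assignments,
  // which is why it must reproduce the hand-computed probPrime ratios above.)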
237 
238  // Make the clique of factors connected to x0:
239  HybridGaussianFactorGraph factors_x0;
240  factors_x0.push_back(graph.at(0));
241  factors_x0.push_back(hgf);
242 
243  // Test collectProductFactor
244  auto productFactor = factors_x0.collectProductFactor();
245 
246  // For M(0) = 0
247  auto [gaussianFactor0, actualScalar0] = productFactor(modeZero);
248  EXPECT(gaussianFactor0.size() == 2);
249  EXPECT_DOUBLES_EQUAL((*hgf)(modeZero).second, actualScalar0, 1e-5);
250 
251  // For M(0) = 1
252  auto [gaussianFactor1, actualScalar1] = productFactor(modeOne);
253  EXPECT(gaussianFactor1.size() == 2);
254  EXPECT_DOUBLES_EQUAL((*hgf)(modeOne).second, actualScalar1, 1e-5);
255 
256  // Test eliminate x0
257  const Ordering ordering{X(0)};
258  auto [conditional, factor] = factors_x0.eliminate(ordering);
259 
260  // Check the conditional
261  CHECK(conditional);
262  EXPECT(conditional->isHybrid());
263  auto p_x0_given_x1_m = conditional->asHybrid();
264  CHECK(p_x0_given_x1_m);
265  EXPECT(HybridGaussianConditional::CheckInvariants(*p_x0_given_x1_m, values1));
266  EXPECT_LONGS_EQUAL(1, p_x0_given_x1_m->nrFrontals()); // x0
267  EXPECT_LONGS_EQUAL(2, p_x0_given_x1_m->nrParents()); // x1, m0
268 
269  // Check the remaining factor
270  EXPECT(factor);
271  EXPECT(std::dynamic_pointer_cast<HybridGaussianFactor>(factor));
272  auto phi_x1_m = std::dynamic_pointer_cast<HybridGaussianFactor>(factor);
273  EXPECT_LONGS_EQUAL(2, phi_x1_m->keys().size()); // x1, m0
274  // Check that the scalars incorporate the negative log constant of the
275  // conditional
276  EXPECT_DOUBLES_EQUAL(scalar0 - (*p_x0_given_x1_m)(modeZero)->negLogConstant(),
277  (*phi_x1_m)(modeZero).second, 1e-9);
278  EXPECT_DOUBLES_EQUAL(scalar1 - (*p_x0_given_x1_m)(modeOne)->negLogConstant(),
279  (*phi_x1_m)(modeOne).second, 1e-9);
280 
281  // Check that the conditional and remaining factor are consistent for both
282  // modes
283  for (auto &&mode : {modeZero, modeOne}) {
284  const auto gc = (*p_x0_given_x1_m)(mode);
285  const auto [gf, scalar] = (*phi_x1_m)(mode);
286 
287  // The error of the original factors should equal the sum of errors of the
288  // conditional and remaining factor, modulo the normalization constant of
289  // the conditional.
290  double originalError = factors_x0.error({continuousValues, mode});
291  const double actualError = gc->negLogConstant() +
292  gc->error(continuousValues) +
293  gf->error(continuousValues) + scalar;
294  EXPECT_DOUBLES_EQUAL(originalError, actualError, 1e-9);
295  }
296 
297  // Create a clique for x1
298  HybridGaussianFactorGraph factors_x1;
299  factors_x1.push_back(
300  factor); // Use the remaining factor from previous elimination
301  factors_x1.push_back(
302  graph.at(1)); // Add the factor for x1 from the original graph
303 
304  // Test collectProductFactor for x1 clique
305  auto productFactor_x1 = factors_x1.collectProductFactor();
306 
307  // For M(0) = 0
308  auto [gaussianFactor_x1_0, actualScalar_x1_0] = productFactor_x1(modeZero);
309  EXPECT_LONGS_EQUAL(2, gaussianFactor_x1_0.size());
310  // NOTE(Frank): prior on x1 does not contribute to the scalar
311  EXPECT_DOUBLES_EQUAL((*phi_x1_m)(modeZero).second, actualScalar_x1_0, 1e-5);
312 
313  // For M(0) = 1
314  auto [gaussianFactor_x1_1, actualScalar_x1_1] = productFactor_x1(modeOne);
315  EXPECT_LONGS_EQUAL(2, gaussianFactor_x1_1.size());
316  // NOTE(Frank): prior on x1 does not contribute to the scalar
317  EXPECT_DOUBLES_EQUAL((*phi_x1_m)(modeOne).second, actualScalar_x1_1, 1e-5);
318 
319  // Test eliminate for x1 clique
320  Ordering ordering_x1{X(1)};
321  auto [conditional_x1, factor_x1] = factors_x1.eliminate(ordering_x1);
322 
323  // Check the conditional for x1
324  CHECK(conditional_x1);
325  EXPECT(conditional_x1->isHybrid());
326  auto p_x1_given_m = conditional_x1->asHybrid();
327  CHECK(p_x1_given_m);
328  EXPECT_LONGS_EQUAL(1, p_x1_given_m->nrFrontals()); // x1
329  EXPECT_LONGS_EQUAL(1, p_x1_given_m->nrParents()); // m0
330 
331  // Check the remaining factor for x1
332  CHECK(factor_x1);
333  auto phi_x1 = std::dynamic_pointer_cast<TableFactor>(factor_x1);
334  CHECK(phi_x1);
335  EXPECT_LONGS_EQUAL(1, phi_x1->keys().size()); // m0
336  // We can't really check the error of the decision tree factor phi_x1, because
337  // the continuous factor whose error(kEmpty) we need is not available.
338 
339  // Now test full elimination of the graph:
340  auto hybridBayesNet = graph.eliminateSequential();
341  CHECK(hybridBayesNet);
342 
343  // Check that the posterior P(M|X=continuousValues) from the Bayes net is the
344  // same as the posterior computed from the graph. This is a sanity check that the
345  // elimination is done correctly.
346  AlgebraicDecisionTree<Key> bnPosterior =
347  hybridBayesNet->discretePosterior(continuousValues);
348  EXPECT(assert_equal(graphPosterior, bnPosterior));
349 }
350 
351 /* ****************************************************************************/
352 // Test subset of API for switching network with 3 states.
353 // None of these are regression tests.
354 TEST(HybridGaussianFactorGraph, ErrorAndProbPrime) {
355  // Create switching network with three continuous variables and two discrete:
356  // ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1)
357  Switching s(3);
358 
359  // Check size of linearized factor graph
360  const HybridGaussianFactorGraph &graph = s.linearizedFactorGraph();
361  EXPECT_LONGS_EQUAL(7, graph.size());
362 
363  // Eliminate the graph
364  const HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential();
365 
366  const HybridValues delta = hybridBayesNet->optimize();
367  const double error = graph.error(delta);
368 
369  // Check that the probability prime is the exponential of the error
370  EXPECT_DOUBLES_EQUAL(std::exp(-error), graph.probPrime(delta), 1e-7);
371 
372  // Check that the posterior P(M|X=continuousValues) from the Bayes net is the
373  // same as the posterior computed from the graph. This is a sanity check that the
374  // elimination is done correctly.
375  const AlgebraicDecisionTree<Key> graphPosterior =
376  graph.discretePosterior(delta.continuous());
377  const AlgebraicDecisionTree<Key> bnPosterior =
378  hybridBayesNet->discretePosterior(delta.continuous());
379  EXPECT(assert_equal(graphPosterior, bnPosterior));
380 }
381 
382 /* ************************************************************************* */
383 // Select a particular continuous factor graph given a discrete assignment
384 TEST(HybridGaussianFactorGraph, DiscreteSelection) {
385  Switching s(3);
386 
387  HybridGaussianFactorGraph graph = s.linearizedFactorGraph();
388 
389  DiscreteValues dv00{{M(0), 0}, {M(1), 0}};
390  GaussianFactorGraph continuous_00 = graph(dv00);
391  GaussianFactorGraph expected_00;
392  expected_00.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10)));
393  expected_00.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10)));
394  expected_00.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10)));
395  expected_00.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-1)));
396  expected_00.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-1)));
397 
398  EXPECT(assert_equal(expected_00, continuous_00));
399 
400  DiscreteValues dv01{{M(0), 0}, {M(1), 1}};
401  GaussianFactorGraph continuous_01 = graph(dv01);
402  GaussianFactorGraph expected_01;
403  expected_01.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10)));
404  expected_01.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10)));
405  expected_01.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10)));
406  expected_01.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-1)));
407  expected_01.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-0)));
408 
409  EXPECT(assert_equal(expected_01, continuous_01));
410 
411  DiscreteValues dv10{{M(0), 1}, {M(1), 0}};
412  GaussianFactorGraph continuous_10 = graph(dv10);
413  GaussianFactorGraph expected_10;
414  expected_10.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10)));
415  expected_10.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10)));
416  expected_10.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10)));
417  expected_10.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-0)));
418  expected_10.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-1)));
419 
420  EXPECT(assert_equal(expected_10, continuous_10));
421 
422  DiscreteValues dv11{{M(0), 1}, {M(1), 1}};
423  GaussianFactorGraph continuous_11 = graph(dv11);
424  GaussianFactorGraph expected_11;
425  expected_11.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10)));
426  expected_11.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10)));
427  expected_11.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10)));
428  expected_11.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-0)));
429  expected_11.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-0)));
430 
431  EXPECT(assert_equal(expected_11, continuous_11));
432 }
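// Note: indexing the hybrid graph with a DiscreteValues assignment, as in
// graph(dv00) above, picks the Gaussian component of every hybrid factor that
// matches the assignment and keeps the purely continuous factors as they are,
// so the result is an ordinary GaussianFactorGraph; the purely discrete
// factors do not appear in it.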
433 
434 /* ************************************************************************* */
435 TEST(HybridGaussianFactorGraph, optimize) {
436  HybridGaussianFactorGraph hfg;
437 
438  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
439  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
440  hfg.add(HybridGaussianFactor(m1, two::components(X(1))));
441 
442  auto result = hfg.eliminateSequential();
443 
444  HybridValues hv = result->optimize();
445 
446  EXPECT(assert_equal(hv.atDiscrete(M(1)), int(0)));
447 }
448 
449 /* ************************************************************************* */
450 // Test adding of gaussian conditional and re-elimination.
451 TEST(HybridGaussianFactorGraph, Conditionals) {
452  Switching switching(4);
453 
454  HybridGaussianFactorGraph hfg;
455  hfg.push_back(switching.linearUnaryFactors.at(0)); // P(X0)
456  Ordering ordering;
457  ordering.push_back(X(0));
458  HybridBayesNet::shared_ptr bayes_net = hfg.eliminateSequential(ordering);
459 
460  HybridGaussianFactorGraph hfg2;
461  hfg2.push_back(*bayes_net); // P(X0)
462  hfg2.push_back(switching.linearBinaryFactors.at(0)); // P(X0, X1 | M0)
463  hfg2.push_back(switching.linearBinaryFactors.at(1)); // P(X1, X2 | M1)
464  hfg2.push_back(switching.linearUnaryFactors.at(2)); // P(X2)
465  ordering += X(1), X(2), M(0), M(1);
466 
467  // Create product of first two factors and check eliminate:
468  HybridGaussianFactorGraph fragment;
469  fragment.push_back(hfg2[0]);
470  fragment.push_back(hfg2[1]);
471 
472  // Check the product:
473  auto product = fragment.collectProductFactor();
474  auto leaf = fragment(DiscreteValues{{M(0), 0}});
475  EXPECT_LONGS_EQUAL(2, leaf.size());
476 
477  // Check the product and that removeEmpty does not touch it
478  auto pruned = product.removeEmpty();
479  LONGS_EQUAL(2, pruned.nrLeaves());
480 
481  // Test eliminate
482  auto [hybridConditional, factor] = fragment.eliminate({X(0)});
483  EXPECT(hybridConditional->isHybrid());
484  EXPECT(hybridConditional->keys() == KeyVector({X(0), X(1), M(0)}));
485 
486  EXPECT(dynamic_pointer_cast<HybridGaussianFactor>(factor));
487  EXPECT(factor->keys() == KeyVector({X(1), M(0)}));
488 
489  bayes_net = hfg2.eliminateSequential(ordering);
490 
491  HybridValues result = bayes_net->optimize();
492 
493  Values expected_continuous;
494  expected_continuous.insert<double>(X(0), 0);
495  expected_continuous.insert<double>(X(1), 1);
496  expected_continuous.insert<double>(X(2), 2);
497  expected_continuous.insert<double>(X(3), 4);
498  Values result_continuous =
499  switching.linearizationPoint.retract(result.continuous());
500  EXPECT(assert_equal(expected_continuous, result_continuous));
501 
502  DiscreteValues expected_discrete;
503  expected_discrete[M(0)] = 1;
504  expected_discrete[M(1)] = 1;
505  EXPECT(assert_equal(expected_discrete, result.discrete()));
506 }
507 
508 /* ****************************************************************************/
509 // Test hybrid gaussian factor graph errorTree during incremental operation
510 TEST(HybridGaussianFactorGraph, IncrementalErrorTree) {
511  Switching s(4);
512 
513  HybridGaussianFactorGraph graph;
514  graph.push_back(s.linearUnaryFactors.at(0)); // f(X0)
515  graph.push_back(s.linearBinaryFactors.at(0)); // f(X0, X1, M0)
516  graph.push_back(s.linearBinaryFactors.at(1)); // f(X1, X2, M1)
517  graph.push_back(s.linearUnaryFactors.at(1)); // f(X1)
518  graph.push_back(s.linearUnaryFactors.at(2)); // f(X2)
519  graph.push_back(s.modeChain.at(0)); // f(M0)
520  graph.push_back(s.modeChain.at(1)); // f(M0, M1)
521 
522  HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential();
523  EXPECT_LONGS_EQUAL(5, hybridBayesNet->size());
524 
525  // Check discrete posterior at optimum
526  HybridValues delta = hybridBayesNet->optimize();
527  AlgebraicDecisionTree<Key> graphPosterior =
528  graph.discretePosterior(delta.continuous());
529  AlgebraicDecisionTree<Key> bnPosterior =
530  hybridBayesNet->discretePosterior(delta.continuous());
531  EXPECT(assert_equal(graphPosterior, bnPosterior));
532 
533  graph = HybridGaussianFactorGraph();
534  graph.push_back(*hybridBayesNet);
535  graph.push_back(s.linearBinaryFactors.at(2)); // f(X2, X3, M2)
536  graph.push_back(s.linearUnaryFactors.at(3)); // f(X3)
537 
538  hybridBayesNet = graph.eliminateSequential();
539  EXPECT_LONGS_EQUAL(7, hybridBayesNet->size());
540 
541  delta = hybridBayesNet->optimize();
542  graphPosterior = graph.discretePosterior(delta.continuous());
543  bnPosterior = hybridBayesNet->discretePosterior(delta.continuous());
544  EXPECT(assert_equal(graphPosterior, bnPosterior));
545 }
546 
547 /* ****************************************************************************/
548 // Check that collectProductFactor works correctly.
549 TEST(HybridGaussianFactorGraph, CollectProductFactor) {
550  const int num_measurements = 1;
551  VectorValues vv{{Z(0), Vector1(5.0)}};
552  auto fg = tiny::createHybridGaussianFactorGraph(num_measurements, vv);
553  EXPECT_LONGS_EQUAL(3, fg.size());
554 
555  // Assemble graph tree:
556  auto actual = fg.collectProductFactor();
557 
558  // Create expected decision tree with two factor graphs:
559 
560  // Get hybrid factor:
561  auto hybrid = fg.at<HybridGaussianFactor>(0);
562  CHECK(hybrid);
563 
564  // Get prior factor:
565  const auto gf = fg.at<HybridConditional>(1);
566  CHECK(gf);
567  using GF = GaussianFactor::shared_ptr;
568  const GF prior = gf->asGaussian();
569  CHECK(prior);
570 
571  // Create DiscreteValues for both 0 and 1:
572  DiscreteValues d0{{M(0), 0}}, d1{{M(0), 1}};
573 
574  // Expected decision tree with two factor graphs:
575  // f(x0;mode=0)P(x0)
576  GaussianFactorGraph expectedFG0{(*hybrid)(d0).first, prior};
577  EXPECT(assert_equal(expectedFG0, actual(d0).first, 1e-5));
578  EXPECT(assert_equal(0.0, actual(d0).second, 1e-5));
579 
580  // f(x0;mode=1)P(x0)
581  GaussianFactorGraph expectedFG1{(*hybrid)(d1).first, prior};
582  EXPECT(assert_equal(expectedFG1, actual(d1).first, 1e-5));
583  EXPECT(assert_equal(1.79176, actual(d1).second, 1e-5));
584 }
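// Note: collectProductFactor returns a HybridGaussianProductFactor, a decision
// tree over the discrete keys whose leaves hold, for each assignment, the
// product GaussianFactorGraph of the selected components together with an
// accumulated scalar; the 0.0 and 1.79176 values checked above are regression
// values for those scalars.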
585 
586 /* ****************************************************************************/
587 // Check that the factor graph unnormalized probability is proportional to the
588 // Bayes net probability for the given measurements.
589 bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements,
590  const HybridGaussianFactorGraph &fg, size_t num_samples = 100) {
591  auto compute_ratio = [&](HybridValues *sample) -> double {
592  sample->update(measurements); // update sample with given measurements:
593  return bn.evaluate(*sample) / fg.probPrime(*sample);
594  };
595 
596  HybridValues sample = bn.sample(&kRng);
597  double expected_ratio = compute_ratio(&sample);
598 
599  // Test ratios for a number of independent samples:
600  for (size_t i = 0; i < num_samples; i++) {
601  HybridValues sample = bn.sample(&kRng);
602  if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) return false;
603  }
604  return true;
605 }
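// Note on the ratio test: in the tests below, fg is obtained from bn via
// toFactorGraph(measurements), so fg.probPrime(x, m) is proportional to
// P(x, m | z = measurements), while bn.evaluate on a sample with z fixed to
// the measurements gives the joint P(x, m, z). Their ratio is therefore the
// same constant for every sample, which is what the loop above checks.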
606 
607 /* ****************************************************************************/
608 // Check that the posterior Bayes net probability is proportional to the
609 // original Bayes net probability for the given measurements.
610 bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements,
611  const HybridBayesNet &posterior, size_t num_samples = 100) {
612  auto compute_ratio = [&](HybridValues *sample) -> double {
613  sample->update(measurements); // update sample with given measurements:
614  return bn.evaluate(*sample) / posterior.evaluate(*sample);
615  };
616 
617  HybridValues sample = bn.sample(&kRng);
618  double expected_ratio = compute_ratio(&sample);
619 
620  // Test ratios for a number of independent samples:
621  for (size_t i = 0; i < num_samples; i++) {
622  HybridValues sample = bn.sample(&kRng);
623  // std::cout << "ratio: " << compute_ratio(&sample) << std::endl;
624  if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) return false;
625  }
626  return true;
627 }
628 
629 /* ****************************************************************************/
630 // Check that eliminating tiny net with 1 measurement yields correct result.
631 TEST(HybridGaussianFactorGraph, EliminateTiny1) {
632  const int num_measurements = 1;
633  const VectorValues measurements{{Z(0), Vector1(5.0)}};
634  auto bn = tiny::createHybridBayesNet(num_measurements);
635  auto fg = bn.toFactorGraph(measurements);
636  EXPECT_LONGS_EQUAL(3, fg.size());
637 
638  EXPECT(ratioTest(bn, measurements, fg));
639 
640  // Create expected Bayes Net:
641  HybridBayesNet expectedBayesNet;
642 
643  // Create hybrid Gaussian factor on X(0).
644  using tiny::mode;
645  // regression, but mean checked to be 5.0 in both cases:
646  const auto conditional0 = std::make_shared<GaussianConditional>(
647  X(0), Vector1(14.1421), I_1x1 * 2.82843),
648  conditional1 = std::make_shared<GaussianConditional>(
649  X(0), Vector1(10.1379), I_1x1 * 2.02759);
650  expectedBayesNet.emplace_shared<HybridGaussianConditional>(
651  mode, std::vector{conditional0, conditional1});
652 
653  // Add prior on mode.
654  expectedBayesNet.emplace_shared<TableDistribution>(mode, "74 26");
655 
656  // Test elimination
657  const auto posterior = fg.eliminateSequential();
658  EXPECT(assert_equal(expectedBayesNet, *posterior, 0.01));
659 
660  EXPECT(ratioTest(bn, measurements, *posterior));
661 }
662 
663 /* ****************************************************************************/
664 // Check that eliminating tiny net with 1 measurement with mode order swapped
665 // yields correct result.
666 TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
667  const VectorValues measurements{{Z(0), Vector1(5.0)}};
668 
669  HybridBayesNet bn;
670 
671  // mode-dependent: 1 is low-noise, 0 is high-noise.
672  // Create hybrid Gaussian factor z_0 = x0 + noise for each measurement.
673  std::vector<std::pair<Vector, double>> parms{{Z_1x1, 3}, {Z_1x1, 0.5}};
674  bn.emplace_shared<HybridGaussianConditional>(m1, Z(0), I_1x1, X(0), parms);
675 
676  // Create prior on X(0).
677  bn.push_back(
678  GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5));
679 
680  // Add prior on m1.
681  bn.emplace_shared<DiscreteConditional>(m1, "0.5/0.5");
682 
683  // bn.print();
684  auto fg = bn.toFactorGraph(measurements);
685  EXPECT_LONGS_EQUAL(3, fg.size());
686 
687  // fg.print();
688 
689  EXPECT(ratioTest(bn, measurements, fg));
690 
691  // Create expected Bayes Net:
692  HybridBayesNet expectedBayesNet;
693 
694  // Create hybrid Gaussian factor on X(0).
695  // regression, but mean checked to be 5.0 in both cases:
696  const auto conditional0 = std::make_shared<GaussianConditional>(
697  X(0), Vector1(10.1379), I_1x1 * 2.02759),
698  conditional1 = std::make_shared<GaussianConditional>(
699  X(0), Vector1(14.1421), I_1x1 * 2.82843);
700  expectedBayesNet.emplace_shared<HybridGaussianConditional>(
701  m1, std::vector{conditional0, conditional1});
702 
703  // Add prior on m1.
704  expectedBayesNet.emplace_shared<TableDistribution>(m1, "0.188638 0.811362");
705 
706  // Test elimination
707  const auto posterior = fg.eliminateSequential();
708  EXPECT(assert_equal(expectedBayesNet, *posterior, 0.01));
709 
710  EXPECT(ratioTest(bn, measurements, *posterior));
711 
712  // posterior->print();
713  // posterior->optimize().print();
714 }
715 
716 /* ****************************************************************************/
717 // Check that eliminating tiny net with 2 measurements yields correct result.
718 TEST(HybridGaussianFactorGraph, EliminateTiny2) {
719  // Create factor graph with 2 measurements such that posterior mean = 5.0.
720  const int num_measurements = 2;
721  const VectorValues measurements{{Z(0), Vector1(4.0)}, {Z(1), Vector1(6.0)}};
722  auto bn = tiny::createHybridBayesNet(num_measurements);
723  auto fg = bn.toFactorGraph(measurements);
724  EXPECT_LONGS_EQUAL(4, fg.size());
725 
726  // Create expected Bayes Net:
727  HybridBayesNet expectedBayesNet;
728 
729  // Create hybrid Gaussian factor on X(0).
730  using tiny::mode;
731  // regression, but mean checked to be 5.0 in both cases:
732  const auto conditional0 = std::make_shared<GaussianConditional>(
733  X(0), Vector1(17.3205), I_1x1 * 3.4641),
734  conditional1 = std::make_shared<GaussianConditional>(
735  X(0), Vector1(10.274), I_1x1 * 2.0548);
736  expectedBayesNet.emplace_shared<HybridGaussianConditional>(
737  mode, std::vector{conditional0, conditional1});
738 
739  // Add prior on mode.
740  // Since this is the only discrete conditional, it is added as a
741  // TableDistribution.
742  expectedBayesNet.emplace_shared<TableDistribution>(mode, "23 77");
743 
744  // Test elimination
745  const auto posterior = fg.eliminateSequential();
746  EXPECT(assert_equal(expectedBayesNet, *posterior, 0.01));
747 
748  EXPECT(ratioTest(bn, measurements, *posterior));
749 }
750 
751 /* ****************************************************************************/
752 // Test eliminating tiny net with 1 mode per measurement.
753 TEST(HybridGaussianFactorGraph, EliminateTiny22) {
754  // Create factor graph with 2 measurements such that posterior mean = 5.0.
755  const int num_measurements = 2;
756  const bool manyModes = true;
757 
758  // Create Bayes net and convert to factor graph.
759  auto bn = tiny::createHybridBayesNet(num_measurements, manyModes);
760  const VectorValues measurements{{Z(0), Vector1(4.0)}, {Z(1), Vector1(6.0)}};
761  auto fg = bn.toFactorGraph(measurements);
762  EXPECT_LONGS_EQUAL(5, fg.size());
763 
764  EXPECT(ratioTest(bn, measurements, fg));
765 
766  // Test elimination
767  const auto posterior = fg.eliminateSequential();
768 
769  EXPECT(ratioTest(bn, measurements, *posterior));
770 }
771 
772 /* ****************************************************************************/
773 // Test elimination of a switching network with one mode per measurement.
774 TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
775  // Create a switching network with one mode per measurement.
776  HybridBayesNet bn;
777 
778  // NOTE: we add the conditionals in reverse topological order so we can sample from the Bayes net:
779 
780  // Add measurements:
781  std::vector<std::pair<Vector, double>> measurementModels{{Z_1x1, 3},
782  {Z_1x1, 0.5}};
783  for (size_t t : {0, 1, 2}) {
784  // Create hybrid Gaussian factor on Z(t) conditioned on X(t) and mode N(t):
785  const auto noise_mode_t = DiscreteKey{N(t), 2};
786  bn.emplace_shared<HybridGaussianConditional>(noise_mode_t, Z(t), I_1x1,
787  X(t), measurementModels);
788 
789  // Create prior on discrete mode N(t):
790  bn.emplace_shared<DiscreteConditional>(noise_mode_t, "20/80");
791  }
792 
793  // Add motion models. TODO(frank): why are they exactly the same?
794  std::vector<std::pair<Vector, double>> motionModels{{Z_1x1, 0.2},
795  {Z_1x1, 0.2}};
796  for (size_t t : {2, 1}) {
797  // Create hybrid Gaussian factor on X(t) conditioned on X(t-1)
798  // and mode M(t-1):
799  const auto motion_model_t = DiscreteKey{M(t), 2};
800  bn.emplace_shared<HybridGaussianConditional>(motion_model_t, X(t), I_1x1,
801  X(t - 1), motionModels);
802 
803  // Create prior on motion model M(t):
804  bn.emplace_shared<DiscreteConditional>(motion_model_t, "40/60");
805  }
806 
807  // Create Gaussian prior on continuous X(0) using sharedMeanAndStddev:
808  bn.push_back(GaussianConditional::sharedMeanAndStddev(X(0), Z_1x1, 0.1));
809 
810  // Make sure we can sample from the Bayes net:
811  const HybridValues sample = bn.sample(&kRng);
812 
813  // Create measurements consistent with moving right every time:
814  const VectorValues measurements{
815  {Z(0), Vector1(0.0)}, {Z(1), Vector1(1.0)}, {Z(2), Vector1(2.0)}};
816  const HybridGaussianFactorGraph fg = bn.toFactorGraph(measurements);
817 
818  // Factor graph is:
819  // D D
820  // | |
821  // m1 m2
822  // | |
823  // C-x0-HC-x1-HC-x2
824  // | | |
825  // HF HF HF
826  // | | |
827  // n0 n1 n2
828  // | | |
829  // D D D
830  EXPECT_LONGS_EQUAL(11, fg.size());
831  EXPECT(ratioTest(bn, measurements, fg));
832 
833  // Do elimination of X(2) only:
834  auto [bn1, fg1] = fg.eliminatePartialSequential(Ordering{X(2)});
835  fg1->push_back(*bn1);
836  EXPECT(ratioTest(bn, measurements, *fg1));
837 
838  // Create ordering that eliminates in time order, then discrete modes:
839  Ordering ordering{X(2), X(1), X(0), N(0), N(1), N(2), M(1), M(2)};
840 
841  // Do elimination:
842  const HybridBayesNet::shared_ptr posterior = fg.eliminateSequential(ordering);
843 
844  // Test resulting posterior Bayes net has correct size:
845  EXPECT_LONGS_EQUAL(8, posterior->size());
846 
847  // Ratio test
848  EXPECT(ratioTest(bn, measurements, *posterior));
849 }
850 
851 /* ************************************************************************* */
852 int main() {
853  TestResult tr;
854  return TestRegistry::runAllTests(tr);
855 }
856 /* ************************************************************************* */