testHybridBayesNet.cpp
Go to the documentation of this file.
1 /* ----------------------------------------------------------------------------
2 
3  * GTSAM Copyright 2010, Georgia Tech Research Corporation,
4  * Atlanta, Georgia 30332-0415
5  * All Rights Reserved
6  * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
7 
8  * See LICENSE for the license information
9 
10  * -------------------------------------------------------------------------- */
11 
21 #include <gtsam/base/Testable.h>
28 
29 #include "Switching.h"
30 #include "TinyHybridExample.h"
31 
32 // Include for test suite
34 
35 #include <memory>
36 
37 using namespace std;
38 using namespace gtsam;
39 
43 
44 static const Key asiaKey = 0;
45 static const DiscreteKey Asia(asiaKey, 2);
46 
47 /* ****************************************************************************/
48 // Test creation of a pure discrete Bayes net.
49 TEST(HybridBayesNet, Creation) {
// NOTE(review): original lines 50-53 (construction of `bayesNet` and the
// `expected` DiscreteConditional) were lost in the doc extraction — restore
// from the upstream testHybridBayesNet.cpp before compiling.
52 
// Verify the single conditional is discrete and matches the expected one.
54  CHECK(bayesNet.at(0)->asDiscrete());
55  EXPECT(assert_equal(expected, *bayesNet.at(0)->asDiscrete()));
56 }
57 
58 /* ****************************************************************************/
59 // Test adding a Bayes net to another one.
63 
65  other.add(bayesNet);
67 }
68 
69 /* ****************************************************************************/
70 // Test API for a pure discrete Bayes net P(Asia).
71 TEST(HybridBayesNet, EvaluatePureDiscrete) {
// NOTE(review): several original lines are missing from this extraction
// (72: `bayesNet` declaration; 78: `empty` declaration; 82-83, 86, 91, 93,
// 96, 100, 104-105, 108-109: assorted EXPECT lines). Restore from upstream.
73  const auto pAsia = std::make_shared<DiscreteConditional>(Asia, "4/6");
74  bayesNet.push_back(pAsia);
75  HybridValues zero{{}, {{asiaKey, 0}}}, one{{}, {{asiaKey, 1}}};
76 
77  // choose: a pure discrete net yields an empty Gaussian Bayes net.
79  EXPECT(assert_equal(empty, bayesNet.choose(zero.discrete()), 1e-9));
80 
81  // evaluate
84 
85  // optimize: no continuous variables, so the optimum is empty VectorValues.
87  EXPECT(assert_equal(VectorValues{}, bayesNet.optimize(one.discrete())));
88 
89  // sample
90  std::mt19937_64 rng(42);
92  EXPECT(assert_equal(one, bayesNet.sample(one, &rng)));
94 
95  // prune: keeping a single leaf leaves one conditional of size 1.
97  EXPECT_LONGS_EQUAL(1, bayesNet.prune(1).at(0)->size());
98 
99  // error: -log P(asia=1) with P(asia=1)=0.6.
101  EXPECT_DOUBLES_EQUAL(-log(0.6), bayesNet.error(one), 1e-9);
102 
103  // errorTree
106 
107  // logProbability
110 
111  // discretePosterior: matches the prior "4/6" since nothing is observed.
112  AlgebraicDecisionTree<Key> expectedPosterior(asiaKey, 0.4, 0.6);
113  EXPECT(assert_equal(expectedPosterior, bayesNet.discretePosterior({})));
114 
115  // toFactorGraph
116  HybridGaussianFactorGraph expectedFG{pAsia}, fg = bayesNet.toFactorGraph({});
117  EXPECT(assert_equal(expectedFG, fg));
118 }
119 
120 /* ****************************************************************************/
121 // Test API for a tiny hybrid Bayes net.
// NOTE(review): the TEST header (original line 122) and a few EXPECT lines
// (124, 130, 151-152, 155, 158, 183) were lost in the doc extraction;
// restore from the upstream testHybridBayesNet.cpp.
123  auto bayesNet = tiny::createHybridBayesNet(); // P(z|x,mode)P(x)P(mode)
125 
126  const VectorValues vv{{Z(0), Vector1(5.0)}, {X(0), Vector1(5.0)}};
127  HybridValues zero{vv, {{M(0), 0}}}, one{vv, {{M(0), 1}}};
128 
129  // Check Invariants for components
131  GaussianConditional::shared_ptr gc0 = hgc->choose(zero.discrete()),
132  gc1 = hgc->choose(one.discrete());
133  GaussianConditional::shared_ptr px = bayesNet.at(1)->asGaussian();
134  GaussianConditional::CheckInvariants(*gc0, vv);
135  GaussianConditional::CheckInvariants(*gc1, vv);
136  GaussianConditional::CheckInvariants(*px, vv);
137  HybridGaussianConditional::CheckInvariants(*hgc, zero);
138  HybridGaussianConditional::CheckInvariants(*hgc, one);
139 
140  // choose: selecting mode 0 should give {gc0, px}.
141  GaussianBayesNet expectedChosen;
142  expectedChosen.push_back(gc0);
143  expectedChosen.push_back(px);
144  auto chosen0 = bayesNet.choose(zero.discrete());
145  auto chosen1 = bayesNet.choose(one.discrete());
146  EXPECT(assert_equal(expectedChosen, chosen0, 1e-9));
147 
148  // logProbability
149  const double logP0 = chosen0.logProbability(vv) + log(0.4); // 0.4 is prior
150  const double logP1 = chosen1.logProbability(vv) + log(0.6); // 0.6 is prior
153 
154  // evaluate
156 
157  // optimize
159  EXPECT(assert_equal(chosen0.optimize(), bayesNet.optimize(zero.discrete())));
160 
161  // sample. Not deterministic !!! TODO(Frank): figure out why
162  // std::mt19937_64 rng(42);
163  // EXPECT(assert_equal({{M(0), 1}}, bayesNet.sample(&rng).discrete()));
164 
165  // prune: keeping one leaf leaves the hybrid conditional with 1 component.
166  auto pruned = bayesNet.prune(1);
167  CHECK(pruned.at(1)->asHybrid());
168  EXPECT_LONGS_EQUAL(1, pruned.at(1)->asHybrid()->nrComponents());
169  EXPECT(!pruned.equals(bayesNet));
170 
171  // error
172  const double error0 = chosen0.error(vv) + gc0->negLogConstant() -
173  px->negLogConstant() - log(0.4);
174  const double error1 = chosen1.error(vv) + gc1->negLogConstant() -
175  px->negLogConstant() - log(0.6);
176  // print errors:
177  EXPECT_DOUBLES_EQUAL(error0, bayesNet.error(zero), 1e-9);
178  EXPECT_DOUBLES_EQUAL(error1, bayesNet.error(one), 1e-9);
179  EXPECT_DOUBLES_EQUAL(error0 + logP0, error1 + logP1, 1e-9);
180 
181  // errorTree
182  AlgebraicDecisionTree<Key> expected(M(0), error0, error1);
184 
185  // discretePosterior
186  // We have: P(z|x,mode)P(x)P(mode). When we condition on z and x, we get
187  // P(mode|z,x) \propto P(z|x,mode)P(x)P(mode)
188  // Normalizing this yields posterior P(mode|z,x) = {0.8, 0.2}
189  double q0 = std::exp(logP0), q1 = std::exp(logP1), sum = q0 + q1;
190  AlgebraicDecisionTree<Key> expectedPosterior(M(0), q0 / sum, q1 / sum);
191  EXPECT(assert_equal(expectedPosterior, bayesNet.discretePosterior(vv)));
192 
193  // toFactorGraph
194  auto fg = bayesNet.toFactorGraph({{Z(0), Vector1(5.0)}});
195  EXPECT_LONGS_EQUAL(3, fg.size());
196 
197  // Create the product factor for eliminating x0:
198  HybridGaussianFactorGraph factors_x0;
199  factors_x0.push_back(fg.at(0));
200  factors_x0.push_back(fg.at(1));
201  auto productFactor = factors_x0.collectProductFactor();
202 
203  // Check that scalars are 0 and 1.79 (regression)
204  EXPECT_DOUBLES_EQUAL(0.0, productFactor({{M(0), 0}}).second, 1e-9);
205  EXPECT_DOUBLES_EQUAL(1.791759, productFactor({{M(0), 1}}).second, 1e-5);
206 
207  // Call eliminate and check scalar:
208  auto result = factors_x0.eliminate({X(0)});
209  auto df = std::dynamic_pointer_cast<DecisionTreeFactor>(result.second);
210 
211  // Check that the ratio of probPrime to evaluate is the same for all modes.
212  std::vector<double> ratio(2);
213  ratio[0] = std::exp(-fg.error(zero)) / bayesNet.evaluate(zero);
214  ratio[1] = std::exp(-fg.error(one)) / bayesNet.evaluate(one);
215  EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8);
216 
217  // Better and more general test:
218  // Since ϕ(M, x) \propto P(M,x|z) the discretePosteriors should agree
219  q0 = std::exp(-fg.error(zero));
220  q1 = std::exp(-fg.error(one));
221  sum = q0 + q1;
222  EXPECT(assert_equal(expectedPosterior, {M(0), q0 / sum, q1 / sum}));
223  VectorValues xv{{X(0), Vector1(5.0)}};
224  auto fgPosterior = fg.discretePosterior(xv);
225  EXPECT(assert_equal(expectedPosterior, fgPosterior));
226 }
227 
228 /* ****************************************************************************/
229 // Hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
// Fixtures for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia) in which the
// two mixture components have different sigmas (2.0 vs 3.0).
230 namespace different_sigmas {
// Gaussian conditional P(X0|X1) with mean 2*x1 - 4 and stddev 5.
231 const auto gc = GaussianConditional::sharedMeanAndStddev(X(0), 2 * I_1x1, X(1),
232  Vector1(-4.0), 5.0);
233 
// Per-mode (mean, sigma) parameters for the hybrid conditional P(X1|Asia).
234 const std::vector<std::pair<Vector, double>> parms{{Vector1(5), 2.0},
235  {Vector1(2), 3.0}};
236 const auto hgc = std::make_shared<HybridGaussianConditional>(Asia, X(1), parms);
237 
// Discrete prior P(Asia) = 99/1.
238 const auto prior = std::make_shared<DiscreteConditional>(Asia, "99/1");
239 auto wrap = [](const auto& c) {
240  return std::make_shared<HybridConditional>(c);
241 };
// NOTE(review): original line 242 — `const HybridBayesNet bayesNet` built
// from the wrapped conditionals (per the Doxygen index) — was lost in the
// doc extraction; restore from upstream.
243 
244 // Create values at which to evaluate.
245 HybridValues values{{{X(0), Vector1(-6)}, {X(1), Vector1(1)}}, {{asiaKey, 0}}};
246 } // namespace different_sigmas
247 
248 /* ****************************************************************************/
249 // Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
250 TEST(HybridBayesNet, evaluateHybrid) {
251  using namespace different_sigmas;
252 
253  const double conditionalProbability = gc->evaluate(values.continuous());
254  const double mixtureProbability = hgc->evaluate(values);
255  EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99,
256  bayesNet.evaluate(values), 1e-9);
257 }
258 
259 /* ****************************************************************************/
260 // Test choosing an assignment of conditionals
// NOTE(review): the TEST header (original line 261, presumably
// TEST(HybridBayesNet, Choose)) and line 276 were lost in the doc
// extraction; restore from upstream.
262  Switching s(4);
263 
264  const Ordering ordering(s.linearizationPoint.keys());
265 
// Eliminate only the continuous variables, leaving a hybrid Bayes net.
266  const auto [hybridBayesNet, remainingFactorGraph] =
267  s.linearizedFactorGraph().eliminatePartialSequential(ordering);
268 
269  DiscreteValues assignment;
270  assignment[M(0)] = 1;
271  assignment[M(1)] = 1;
272  assignment[M(2)] = 0;
273 
// choose() must pick, per conditional, the component matching `assignment`.
274  GaussianBayesNet gbn = hybridBayesNet->choose(assignment);
275 
277 
278  EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asHybrid())(assignment),
279  *gbn.at(0)));
280  EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asHybrid())(assignment),
281  *gbn.at(1)));
282  EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asHybrid())(assignment),
283  *gbn.at(2)));
284  EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asHybrid())(assignment),
285  *gbn.at(3)));
286 }
287 
288 /* ****************************************************************************/
289 // Test Bayes net optimize
290 TEST(HybridBayesNet, OptimizeAssignment) {
291  Switching s(4);
292 
293  const Ordering ordering(s.linearizationPoint.keys());
294 
295  const auto [hybridBayesNet, remainingFactorGraph] =
296  s.linearizedFactorGraph().eliminatePartialSequential(ordering);
297 
298  DiscreteValues assignment;
299  assignment[M(0)] = 1;
300  assignment[M(1)] = 1;
301  assignment[M(2)] = 1;
302 
303  VectorValues delta = hybridBayesNet->optimize(assignment);
304 
305  // The linearization point has the same value as the key index,
306  // e.g. X(0) = 1, X(1) = 2,
307  // but the factors specify X(k) = k-1, so delta should be -1.
308  VectorValues expected_delta;
309  expected_delta.insert(make_pair(X(0), -Vector1::Ones()));
310  expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
311  expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
312  expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
313 
314  EXPECT(assert_equal(expected_delta, delta));
315 }
316 
317 /* ****************************************************************************/
318 // Test Bayes net optimize
319 TEST(HybridBayesNet, Optimize) {
320  Switching s(4, 1.0, 0.1, {0, 1, 2, 3}, "1/1 1/1");
321 
322  HybridBayesNet::shared_ptr hybridBayesNet =
323  s.linearizedFactorGraph().eliminateSequential();
324 
325  HybridValues delta = hybridBayesNet->optimize();
326 
327  // NOTE: The true assignment is 111, but the discrete priors cause 101
328  DiscreteValues expectedAssignment;
329  expectedAssignment[M(0)] = 1;
330  expectedAssignment[M(1)] = 1;
331  expectedAssignment[M(2)] = 1;
332  EXPECT(assert_equal(expectedAssignment, delta.discrete()));
333 
334  VectorValues expectedValues;
335  expectedValues.insert(X(0), -Vector1::Ones());
336  expectedValues.insert(X(1), -Vector1::Ones());
337  expectedValues.insert(X(2), -Vector1::Ones());
338  expectedValues.insert(X(3), -Vector1::Ones());
339 
340  EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
341 }
342 
343 /* ****************************************************************************/
344 // Test Bayes net error
345 TEST(HybridBayesNet, Pruning) {
346  // Create switching network with three continuous variables and two discrete:
347  // ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x0;z0) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1)
348  Switching s(3);
349 
350  HybridBayesNet::shared_ptr posterior =
351  s.linearizedFactorGraph().eliminateSequential();
352  EXPECT_LONGS_EQUAL(5, posterior->size());
353 
354  // Optimize
355  HybridValues delta = posterior->optimize();
356 
357  // Verify discrete posterior at optimal value sums to 1.
358  auto discretePosterior = posterior->discretePosterior(delta.continuous());
359  EXPECT_DOUBLES_EQUAL(1.0, discretePosterior.sum(), 1e-9);
360 
361  // Regression test on discrete posterior at optimal value.
362  std::vector<double> leaves = {0.095516068, 0.31800092, 0.27798511, 0.3084979};
363  AlgebraicDecisionTree<Key> expected(s.modes, leaves);
364  EXPECT(assert_equal(expected, discretePosterior, 1e-6));
365 
366  // Verify logProbability computation and check specific logProbability value
367  const DiscreteValues discrete_values{{M(0), 1}, {M(1), 1}};
368  const HybridValues hybridValues{delta.continuous(), discrete_values};
369  double logProbability = 0;
370  logProbability += posterior->at(0)->asHybrid()->logProbability(hybridValues);
371  logProbability += posterior->at(1)->asHybrid()->logProbability(hybridValues);
372  logProbability += posterior->at(2)->asHybrid()->logProbability(hybridValues);
373  logProbability +=
374  posterior->at(3)->asDiscrete()->logProbability(hybridValues);
375  logProbability +=
376  posterior->at(4)->asDiscrete()->logProbability(hybridValues);
377  EXPECT_DOUBLES_EQUAL(logProbability, posterior->logProbability(hybridValues),
378  1e-9);
379 
380  double negLogConstant = posterior->negLogConstant(discrete_values);
381 
382  // The sum of all the mode densities
383  double normalizer =
384  AlgebraicDecisionTree<Key>(posterior->errorTree(delta.continuous()),
385  [](double error) { return exp(-error); })
386  .sum();
387 
388  // Check agreement with discrete posterior
389  double density = exp(logProbability + negLogConstant) / normalizer;
390  EXPECT_DOUBLES_EQUAL(density, discretePosterior(discrete_values), 1e-6);
391 
392  // Prune and get probabilities
393  auto prunedBayesNet = posterior->prune(2);
394  auto prunedTree = prunedBayesNet.discretePosterior(delta.continuous());
395 
396  // Regression test on pruned logProbability tree
397  std::vector<double> pruned_leaves = {0.0, 0.50758422, 0.0, 0.49241578};
398  AlgebraicDecisionTree<Key> expected_pruned(s.modes, pruned_leaves);
399  EXPECT(assert_equal(expected_pruned, prunedTree, 1e-6));
400 
401  // Regression
402  double pruned_logProbability = 0;
403  pruned_logProbability +=
404  prunedBayesNet.at(0)->asDiscrete()->logProbability(hybridValues);
405  pruned_logProbability +=
406  prunedBayesNet.at(1)->asHybrid()->logProbability(hybridValues);
407  pruned_logProbability +=
408  prunedBayesNet.at(2)->asHybrid()->logProbability(hybridValues);
409  pruned_logProbability +=
410  prunedBayesNet.at(3)->asHybrid()->logProbability(hybridValues);
411 
412  double pruned_negLogConstant = prunedBayesNet.negLogConstant(discrete_values);
413 
414  // The sum of all the mode densities
415  double pruned_normalizer =
416  AlgebraicDecisionTree<Key>(prunedBayesNet.errorTree(delta.continuous()),
417  [](double error) { return exp(-error); })
418  .sum();
419  double pruned_density =
420  exp(pruned_logProbability + pruned_negLogConstant) / pruned_normalizer;
421  EXPECT_DOUBLES_EQUAL(pruned_density, prunedTree(discrete_values), 1e-9);
422 }
423 
424 /* ****************************************************************************/
425 // Test Bayes net pruning
// NOTE(review): the TEST header (original line 426, presumably
// TEST(HybridBayesNet, Prune)) was lost in the doc extraction.
427  Switching s(4);
428 
429  HybridBayesNet::shared_ptr posterior =
430  s.linearizedFactorGraph().eliminateSequential();
431  EXPECT_LONGS_EQUAL(7, posterior->size());
432 
433  HybridValues delta = posterior->optimize();
434 
// Pruning to 2 leaves must not change the MPE solution.
435  auto prunedBayesNet = posterior->prune(2);
436  HybridValues pruned_delta = prunedBayesNet.optimize();
437 
438  EXPECT(assert_equal(delta.discrete(), pruned_delta.discrete()));
439  EXPECT(assert_equal(delta.continuous(), pruned_delta.continuous()));
440 }
441 
442 /* ****************************************************************************/
443 // Test Bayes net updateDiscreteConditionals
444 TEST(HybridBayesNet, UpdateDiscreteConditionals) {
445  Switching s(4);
446 
447  HybridBayesNet::shared_ptr posterior =
448  s.linearizedFactorGraph().eliminateSequential();
449  EXPECT_LONGS_EQUAL(7, posterior->size());
450 
451  DiscreteConditional joint;
452  for (auto&& conditional : posterior->discreteMarginal()) {
453  joint = joint * (*conditional);
454  }
455 
456  size_t maxNrLeaves = 3;
457  auto prunedDecisionTree = joint.prune(maxNrLeaves);
458 
459 #ifdef GTSAM_DT_MERGING
460  EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/,
461  prunedDecisionTree.nrLeaves());
462 #else
463  EXPECT_LONGS_EQUAL(8 /*full tree*/, prunedDecisionTree.nrLeaves());
464 #endif
465 
466  // regression
467  // NOTE(Frank): I had to include *three* non-zeroes here now.
468  DecisionTreeFactor::ADT potentials(
469  s.modes,
470  std::vector<double>{0, 0, 0, 0.28739288, 0, 0.43106901, 0, 0.2815381});
471  DiscreteConditional expectedConditional(3, s.modes, potentials);
472 
473  // Prune!
474  auto pruned = posterior->prune(maxNrLeaves);
475 
476  // Functor to verify values against the expectedConditional
477  auto checker = [&](const Assignment<Key>& assignment,
478  double probability) -> double {
479  // typecast so we can use this to get probability value
480  DiscreteValues choices(assignment);
481  if (prunedDecisionTree(choices) == 0) {
482  EXPECT_DOUBLES_EQUAL(0.0, probability, 1e-9);
483  } else {
484  EXPECT_DOUBLES_EQUAL(expectedConditional(choices), probability, 1e-6);
485  }
486  return 0.0;
487  };
488 
489  // Get the pruned discrete conditionals as an AlgebraicDecisionTree
490  CHECK(pruned.at(0)->asDiscrete());
491  auto pruned_discrete_conditionals = pruned.at(0)->asDiscrete();
492  auto discrete_conditional_tree =
493  std::dynamic_pointer_cast<DecisionTreeFactor::ADT>(
494  pruned_discrete_conditionals);
495 
496  // The checker functor verifies the values for us.
497  discrete_conditional_tree->apply(checker);
498 }
499 
500 /* ****************************************************************************/
501 // Test HybridBayesNet sampling.
502 TEST(HybridBayesNet, Sampling) {
// NOTE(review): original lines 503, 506, 512, 517 and 525 were lost in the
// doc extraction (among them: the HybridNonlinearFactorGraph declaration,
// the start of the HybridNonlinearFactor construction that lines 513-514
// continue, and the `fg` linearization that line 527 consumes). Restore
// from upstream before compiling.
504 
505  auto noise_model = noiseModel::Diagonal::Sigmas(Vector1(1.0));
507 
// Two motion hypotheses: between-factor offsets 0 and 1 on (X0, X1).
508  auto zero_motion =
509  std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
510  auto one_motion =
511  std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
513  DiscreteKey(M(0), 2),
514  std::vector<NoiseModelFactor::shared_ptr>{zero_motion, one_motion});
515 
516  DiscreteKey mode(M(0), 2);
518 
519  Values initial;
520  double z0 = 0.0, z1 = 1.0;
521  initial.insert<double>(X(0), z0);
522  initial.insert<double>(X(1), z1);
523 
524  // Create the factor graph from the nonlinear factor graph.
526  // Eliminate into BN
527  HybridBayesNet::shared_ptr bn = fg->eliminateSequential();
528 
529  // Set up sampling
530  std::mt19937_64 gen(11);
531 
532  // Initialize containers for computing the mean values.
533  vector<double> discrete_samples;
534  VectorValues average_continuous;
535 
536  size_t num_samples = 1000;
537  for (size_t i = 0; i < num_samples; i++) {
538  // Sample
539  HybridValues sample = bn->sample(&gen);
540 
541  discrete_samples.push_back(sample.discrete().at(M(0)));
542 
543  if (i == 0) {
544  average_continuous.insert(sample.continuous());
545  } else {
546  average_continuous += sample.continuous();
547  }
548  }
549 
550  EXPECT_LONGS_EQUAL(2, average_continuous.size());
551  EXPECT_LONGS_EQUAL(num_samples, discrete_samples.size());
552 
553  // Regressions don't work across platforms :-(
554  // // regression for specific RNG seed
555  // double discrete_sum =
556  // std::accumulate(discrete_samples.begin(), discrete_samples.end(),
557  // decltype(discrete_samples)::value_type(0));
558  // EXPECT_DOUBLES_EQUAL(0.477, discrete_sum / num_samples, 1e-9);
559 
560  // VectorValues expected;
561  // expected.insert({X(0), Vector1(-0.0131207162712)});
562  // expected.insert({X(1), Vector1(-0.499026377568)});
563  // // regression for specific RNG seed
564  // EXPECT(assert_equal(expected, average_continuous.scale(1.0 /
565  // num_samples)));
566 }
567 
568 /* ****************************************************************************/
569 // Test hybrid gaussian factor graph errorTree when
570 // there is a HybridConditional in the graph
571 TEST(HybridBayesNet, ErrorTreeWithConditional) {
// NOTE(review): original line 609 — the declaration of `vv` used below,
// presumably `VectorValues vv;` — was lost in the doc extraction.
572  using symbol_shorthand::F;
573 
574  Key z0 = Z(0), f01 = F(0);
575  Key x0 = X(0), x1 = X(1);
576 
577  HybridBayesNet hbn;
578 
579  auto prior_model = noiseModel::Isotropic::Sigma(1, 1e-1);
580  auto measurement_model = noiseModel::Isotropic::Sigma(1, 2.0);
581 
582  // Set a prior P(x0) at x0=0
583  hbn.emplace_shared<GaussianConditional>(x0, Vector1(0.0), I_1x1, prior_model);
584 
585  // Add measurement P(z0 | x0)
586  hbn.emplace_shared<GaussianConditional>(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
587  measurement_model);
588 
589  // Add hybrid motion model: two components with sigmas 1e2 and 1e-2.
590  double mu = 0.0;
591  double sigma0 = 1e2, sigma1 = 1e-2;
592  auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
593  auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
594  auto c0 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
595  x0, -I_1x1, model0),
596  c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
597  x0, -I_1x1, model1);
598  DiscreteKey m1(M(2), 2);
599  hbn.emplace_shared<HybridGaussianConditional>(m1, std::vector{c0, c1});
600 
601  // Discrete uniform prior.
602  hbn.emplace_shared<DiscreteConditional>(m1, "0.5/0.5");
603 
// Condition on the measurements z0 and f01 to obtain a factor graph.
604  VectorValues given;
605  given.insert(z0, Vector1(0.0));
606  given.insert(f01, Vector1(0.0));
607  auto gfg = hbn.toFactorGraph(given);
608 
610  vv.insert(x0, Vector1(1.0));
611  vv.insert(x1, Vector1(2.0));
612  AlgebraicDecisionTree<Key> errorTree = gfg.errorTree(vv);
613 
614  // regression
615  AlgebraicDecisionTree<Key> expected(m1, 60.028538, 5050.8181);
616  EXPECT(assert_equal(expected, errorTree, 1e-4));
617 }
618 
619 /* ************************************************************************* */
620 int main() {
621  TestResult tr;
622  return TestRegistry::runAllTests(tr);
623 }
624 /* ************************************************************************* */
TestRegistry::runAllTests
static int runAllTests(TestResult &result)
Definition: TestRegistry.cpp:27
different_sigmas::parms
const std::vector< std::pair< Vector, double > > parms
Definition: testHybridBayesNet.cpp:234
gtsam::HybridBayesNet::equals
bool equals(const This &fg, double tol=1e-9) const
GTSAM-style equals.
Definition: HybridBayesNet.cpp:39
gtsam::HybridGaussianFactorGraph::shared_ptr
std::shared_ptr< This > shared_ptr
shared_ptr to This
Definition: HybridGaussianFactorGraph.h:120
simple_graph::sigma1
double sigma1
Definition: testJacobianFactor.cpp:193
gtsam::HybridValues
Definition: HybridValues.h:37
rng
static std::mt19937 rng
Definition: timeFactorOverhead.cpp:31
Asia
static const DiscreteKey Asia(asiaKey, 2)
gtsam::HybridGaussianConditional::shared_ptr
std::shared_ptr< This > shared_ptr
Definition: HybridGaussianConditional.h:59
s
RealScalar s
Definition: level1_cplx_impl.h:126
e
Array< double, 1, 3 > e(1./3., 0.5, 2.)
gtsam::FactorGraph::error
double error(const HybridValues &values) const
Definition: FactorGraph-inst.h:66
EXPECT_LONGS_EQUAL
#define EXPECT_LONGS_EQUAL(expected, actual)
Definition: Test.h:154
Testable.h
Concept check for values that can be used in unit tests.
EXPECT
#define EXPECT(condition)
Definition: Test.h:150
TestHarness.h
gtsam::HybridBayesNet
Definition: HybridBayesNet.h:37
gtsam::HybridNonlinearFactorGraph
Definition: HybridNonlinearFactorGraph.h:33
mu
double mu
Definition: testBoundingConstraint.cpp:37
gtsam::HybridBayesNet::evaluate
double evaluate(const HybridValues &values) const
Evaluate hybrid probability density for given HybridValues.
Definition: HybridBayesNet.cpp:234
gtsam::VectorValues::insert
iterator insert(const std::pair< Key, Vector > &key_value)
Definition: VectorValues.cpp:90
c
Scalar Scalar * c
Definition: benchVecAdd.cpp:17
z1
Point2 z1
Definition: testTriangulationFactor.cpp:45
gtsam::DiscreteDistribution
Definition: DiscreteDistribution.h:33
initial
Values initial
Definition: OdometryOptimize.cpp:2
Switching.h
gtsam::HybridBayesNet::prune
HybridBayesNet prune(size_t maxNrLeaves) const
Prune the Bayes Net such that we have at most maxNrLeaves leaves.
Definition: HybridBayesNet.cpp:48
gtsam::DecisionTreeFactor::prune
DecisionTreeFactor prune(size_t maxNrAssignments) const
Prune the decision tree of discrete variables.
Definition: DecisionTreeFactor.cpp:464
wrap
mxArray * wrap(const Class &value)
Definition: matlab.h:126
gtsam::PriorFactor
Definition: nonlinear/PriorFactor.h:30
m1
Matrix3d m1
Definition: IOFormat.cpp:2
HybridBayesNet.h
A Bayes net of Gaussian Conditionals indexed by discrete keys.
different_sigmas::values
HybridValues values
Definition: testHybridBayesNet.cpp:245
log
const EIGEN_DEVICE_FUNC LogReturnType log() const
Definition: ArrayCwiseUnaryOps.h:128
X
#define X
Definition: icosphere.cpp:20
vv
static const VectorValues vv
Definition: testHybridGaussianConditional.cpp:44
exp
const EIGEN_DEVICE_FUNC ExpReturnType exp() const
Definition: ArrayCwiseUnaryOps.h:97
result
Values result
Definition: OdometryOptimize.cpp:8
main
int main()
Definition: testHybridBayesNet.cpp:620
test_motion::noise_model
auto noise_model
Definition: testHybridNonlinearFactorGraph.cpp:119
gtsam::HybridGaussianFactorGraph::collectProductFactor
HybridGaussianProductFactor collectProductFactor() const
Create a decision tree of factor graphs out of this hybrid factor graph.
Definition: HybridGaussianFactorGraph.cpp:177
gtsam::HybridValues::continuous
const VectorValues & continuous() const
Return the multi-dimensional vector values.
Definition: HybridValues.cpp:54
gtsam::AlgebraicDecisionTree< Key >
c1
static double c1
Definition: airy.c:54
gtsam::Switching
ϕ(X(0)) .. ϕ(X(k),X(k+1)) .. ϕ(X(k);z_k) .. ϕ(M(0)) .. ϕ(M(K-3),M(K-2))
Definition: Switching.h:124
gtsam::FactorGraph::at
const sharedFactor at(size_t i) const
Definition: FactorGraph.h:306
gtsam::HybridBayesNet::optimize
HybridValues optimize() const
Solve the HybridBayesNet by first computing the MPE of all the discrete variables and then optimizing...
Definition: HybridBayesNet.cpp:123
Vector1
Eigen::Matrix< double, 1, 1 > Vector1
Definition: testEvent.cpp:33
different_sigmas::bayesNet
const HybridBayesNet bayesNet
Definition: testHybridBayesNet.cpp:242
gtsam.examples.PlanarManipulatorExample.delta
def delta(g0, g1)
Definition: PlanarManipulatorExample.py:45
gtsam::VectorValues
Definition: VectorValues.h:74
x1
Pose3 x1
Definition: testPose3.cpp:663
mode
static const DiscreteKey mode(modeKey, 2)
gtsam::HybridBayesNet::errorTree
AlgebraicDecisionTree< Key > errorTree(const VectorValues &continuousValues) const
Compute the negative log posterior log P'(M|x) of all assignments up to a constant,...
Definition: HybridBayesNet.cpp:188
DiscreteFactor.h
TinyHybridExample.h
gtsam::GaussianConditional
Definition: GaussianConditional.h:40
zero
EIGEN_DONT_INLINE Scalar zero()
Definition: svd_common.h:296
cholesky::expected
Matrix expected
Definition: testMatrix.cpp:971
gtsam::HybridGaussianConditional
A conditional of gaussian conditionals indexed by discrete variables, as part of a Bayes Network....
Definition: HybridGaussianConditional.h:54
x0
static Symbol x0('x', 0)
gtsam::HybridGaussianFactorGraph::eliminate
std::pair< std::shared_ptr< HybridConditional >, std::shared_ptr< Factor > > eliminate(const Ordering &keys) const
Eliminate the given continuous keys.
Definition: HybridGaussianFactorGraph.cpp:358
gtsam::HybridGaussianFactorGraph
Definition: HybridGaussianFactorGraph.h:106
density
Definition: testGaussianConditional.cpp:127
gtsam::Assignment< Key >
TEST
TEST(HybridBayesNet, Creation)
Definition: testHybridBayesNet.cpp:49
HybridBayesTree.h
Hybrid Bayes Tree, the result of eliminating a HybridJunctionTree.
sampling::gbn
static const GaussianBayesNet gbn
Definition: testGaussianBayesNet.cpp:171
gtsam::symbol_shorthand::F
Key F(std::uint64_t j)
Definition: inference/Symbol.h:153
EXPECT_DOUBLES_EQUAL
#define EXPECT_DOUBLES_EQUAL(expected, actual, threshold)
Definition: Test.h:161
ordering
static enum @1096 ordering
TestResult
Definition: TestResult.h:26
gtsam::AlgebraicDecisionTree::sum
double sum() const
Compute sum of all values.
Definition: AlgebraicDecisionTree.h:208
gtsam::HybridBayesNet::discretePosterior
AlgebraicDecisionTree< Key > discretePosterior(const VectorValues &continuousValues) const
Compute normalized posterior P(M|X=x) and return as a tree.
Definition: HybridBayesNet.cpp:225
gtsam::FactorGraph::size
size_t size() const
Definition: FactorGraph.h:297
gtsam::HybridBayesNet::push_back
void push_back(std::shared_ptr< HybridConditional > conditional)
Add a hybrid conditional using a shared_ptr.
Definition: HybridBayesNet.h:76
gtsam::DiscreteConditional
Definition: DiscreteConditional.h:37
different_sigmas::gc
const auto gc
Definition: testHybridBayesNet.cpp:231
HybridGaussianFactorGraph.h
Linearized Hybrid factor graph that uses type erasure.
empty
Definition: test_copy_move.cpp:19
gtsam
traits
Definition: SFMdata.h:40
error
static double error
Definition: testRot3.cpp:37
NonlinearFactorGraph.h
Factor Graph consisting of non-linear factors.
gtsam::DiscreteValues
Definition: DiscreteValues.h:34
gtsam::FactorGraph::push_back
IsDerived< DERIVEDFACTOR > push_back(std::shared_ptr< DERIVEDFACTOR > factor)
Add a factor directly using a shared_ptr.
Definition: FactorGraph.h:147
gtsam::Values
Definition: Values.h:65
CHECK
#define CHECK(condition)
Definition: Test.h:108
gtsam::HybridBayesNet::sample
HybridValues sample(const HybridValues &given, std::mt19937_64 *rng) const
Sample from an incomplete BayesNet, given missing variables.
Definition: HybridBayesNet.cpp:153
gtsam::DiscreteKey
std::pair< Key, size_t > DiscreteKey
Definition: DiscreteKey.h:38
std
Definition: BFloat16.h:88
gtsam::HybridValues::discrete
const DiscreteValues & discrete() const
Return the discrete values.
Definition: HybridValues.cpp:57
ratio
set noclip points set clip one set noclip two set bar set border lt lw set xdata set ydata set zdata set x2data set y2data set boxwidth set dummy y set format x g set format y g set format x2 g set format y2 g set format z g set angles radians set nogrid set key title set key left top Right noreverse box linetype linewidth samplen spacing width set nolabel set noarrow set nologscale set logscale x set set pointsize set encoding default set nopolar set noparametric set set set set surface set nocontour set clabel set mapping cartesian set nohidden3d set cntrparam order set cntrparam linear set cntrparam levels auto set cntrparam points set size ratio
Definition: gnuplot_common_settings.hh:44
gtsam::HybridNonlinearFactorGraph::linearize
std::shared_ptr< HybridGaussianFactorGraph > linearize(const Values &continuousValues) const
Linearize all the continuous factors in the HybridNonlinearFactorGraph.
Definition: HybridNonlinearFactorGraph.cpp:139
gtsam::GaussianConditional::shared_ptr
std::shared_ptr< This > shared_ptr
shared_ptr to this class
Definition: GaussianConditional.h:46
gtsam::VectorValues::size
size_t size() const
Definition: VectorValues.h:130
gtsam::assert_equal
bool assert_equal(const Matrix &expected, const Matrix &actual, double tol)
Definition: Matrix.cpp:41
gtsam::HybridBayesNet::logProbability
double logProbability(const HybridValues &x) const
Definition: BayesNet-inst.h:94
initial
Definition: testScenarioRunner.cpp:148
different_sigmas::prior
const auto prior
Definition: testHybridBayesNet.cpp:238
gtsam::tiny::createHybridBayesNet
HybridBayesNet createHybridBayesNet(size_t num_measurements=1, bool manyModes=false)
Definition: TinyHybridExample.h:39
different_sigmas
Definition: testHybridBayesNet.cpp:230
different_sigmas::hgc
const auto hgc
Definition: testHybridBayesNet.cpp:236
gtsam::HybridBayesNet::shared_ptr
std::shared_ptr< HybridBayesNet > shared_ptr
Definition: HybridBayesNet.h:42
gtsam::HybridBayesNet::toFactorGraph
HybridGaussianFactorGraph toFactorGraph(const VectorValues &measurements) const
Definition: HybridBayesNet.cpp:239
model1
noiseModel::Isotropic::shared_ptr model1
Definition: testEssentialMatrixFactor.cpp:26
gtsam::Key
std::uint64_t Key
Integer nonlinear key type.
Definition: types.h:97
Z
#define Z
Definition: icosphere.cpp:21
gtsam::Ordering
Definition: inference/Ordering.h:33
i
int i
Definition: BiCGSTAB_step_by_step.cpp:9
gtsam::HybridNonlinearFactor
Implementation of a discrete-conditioned hybrid factor.
Definition: HybridNonlinearFactor.h:58
pybind_wrapper_test_script.other
other
Definition: pybind_wrapper_test_script.py:42
gtsam::GaussianBayesNet
Definition: GaussianBayesNet.h:35
gtsam::FactorGraph::emplace_shared
IsDerived< DERIVEDFACTOR > emplace_shared(Args &&... args)
Emplace a shared pointer to factor of given type.
Definition: FactorGraph.h:153
asiaKey
static const Key asiaKey
Definition: testHybridBayesNet.cpp:44
gtsam::HybridBayesNet::choose
GaussianBayesNet choose(const DiscreteValues &assignment) const
Get the Gaussian Bayes net P(X|M=m) corresponding to a specific assignment m for the discrete variabl...
Definition: HybridBayesNet.cpp:103
gtsam::HybridBayesNet::emplace_shared
void emplace_shared(Args &&...args)
Definition: HybridBayesNet.h:116
HybridConditional.h
px
RealScalar RealScalar * px
Definition: level1_cplx_impl.h:28
M
Matrix< RealScalar, Dynamic, Dynamic > M
Definition: bench_gemm.cpp:51


gtsam
Author(s):
autogenerated on Sun Dec 22 2024 04:16:29