testHybridBayesNet.cpp
Go to the documentation of this file.
1 /* ----------------------------------------------------------------------------
2 
3  * GTSAM Copyright 2010, Georgia Tech Research Corporation,
4  * Atlanta, Georgia 30332-0415
5  * All Rights Reserved
6  * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
7 
8  * See LICENSE for the license information
9 
10  * -------------------------------------------------------------------------- */
11 
21 #include <gtsam/base/Testable.h>
29 
30 #include "Switching.h"
31 #include "TinyHybridExample.h"
32 
33 // Include for test suite
35 
36 #include <memory>
37 #include <numeric>
38 
39 using namespace std;
40 using namespace gtsam;
41 
45 
46 static const Key asiaKey = 0;
47 static const DiscreteKey Asia(asiaKey, 2);
48 
49 /* ****************************************************************************/
50 // Test creation of a pure discrete Bayes net.
51 TEST(HybridBayesNet, Creation) {
54 
56  CHECK(bayesNet.at(0)->asDiscrete());
57  EXPECT(assert_equal(expected, *bayesNet.at(0)->asDiscrete()));
58 }
59 
60 /* ****************************************************************************/
61 // Test adding a Bayes net to another one.
65 
67  other.add(bayesNet);
69 }
70 
71 /* ****************************************************************************/
72 // Test API for a pure discrete Bayes net P(Asia).
73 TEST(HybridBayesNet, EvaluatePureDiscrete) {
75  const auto pAsia = std::make_shared<DiscreteConditional>(Asia, "4/6");
76  bayesNet.push_back(pAsia);
77  HybridValues zero{{}, {{asiaKey, 0}}}, one{{}, {{asiaKey, 1}}};
78 
79  // choose
81  EXPECT(assert_equal(empty, bayesNet.choose(zero.discrete()), 1e-9));
82 
83  // evaluate
86 
87  // optimize
89  EXPECT(assert_equal(VectorValues{}, bayesNet.optimize(one.discrete())));
90 
91  // sample
92  std::mt19937_64 rng(42);
94  EXPECT(assert_equal(one, bayesNet.sample(one, &rng)));
96 
97  // prune
99  EXPECT_LONGS_EQUAL(1, bayesNet.prune(1).at(0)->size());
100 
101  // error
103  EXPECT_DOUBLES_EQUAL(-log(0.6), bayesNet.error(one), 1e-9);
104 
105  // errorTree
108 
109  // logProbability
112 
113  // discretePosterior
114  AlgebraicDecisionTree<Key> expectedPosterior(asiaKey, 0.4, 0.6);
115  EXPECT(assert_equal(expectedPosterior, bayesNet.discretePosterior({})));
116 
117  // toFactorGraph
118  HybridGaussianFactorGraph expectedFG{pAsia}, fg = bayesNet.toFactorGraph({});
119  EXPECT(assert_equal(expectedFG, fg));
120 }
121 
122 /* ****************************************************************************/
123 // Test API for a tiny hybrid Bayes net.
125  auto bayesNet = tiny::createHybridBayesNet(); // P(z|x,mode)P(x)P(mode)
127 
128  const VectorValues vv{{Z(0), Vector1(5.0)}, {X(0), Vector1(5.0)}};
129  HybridValues zero{vv, {{M(0), 0}}}, one{vv, {{M(0), 1}}};
130 
131  // Check Invariants for components
133  GaussianConditional::shared_ptr gc0 = hgc->choose(zero.discrete()),
134  gc1 = hgc->choose(one.discrete());
135  GaussianConditional::shared_ptr px = bayesNet.at(1)->asGaussian();
136  GaussianConditional::CheckInvariants(*gc0, vv);
137  GaussianConditional::CheckInvariants(*gc1, vv);
138  GaussianConditional::CheckInvariants(*px, vv);
139  HybridGaussianConditional::CheckInvariants(*hgc, zero);
140  HybridGaussianConditional::CheckInvariants(*hgc, one);
141 
142  // choose
143  GaussianBayesNet expectedChosen;
144  expectedChosen.push_back(gc0);
145  expectedChosen.push_back(px);
146  auto chosen0 = bayesNet.choose(zero.discrete());
147  auto chosen1 = bayesNet.choose(one.discrete());
148  EXPECT(assert_equal(expectedChosen, chosen0, 1e-9));
149 
150  // logProbability
151  const double logP0 = chosen0.logProbability(vv) + log(0.4); // 0.4 is prior
152  const double logP1 = chosen1.logProbability(vv) + log(0.6); // 0.6 is prior
155 
156  // evaluate
158 
159  // optimize
161  EXPECT(assert_equal(chosen0.optimize(), bayesNet.optimize(zero.discrete())));
162 
163  // sample. Not deterministic !!! TODO(Frank): figure out why
164  // std::mt19937_64 rng(42);
165  // EXPECT(assert_equal({{M(0), 1}}, bayesNet.sample(&rng).discrete()));
166 
167  // prune
168  auto pruned = bayesNet.prune(1);
169  CHECK(pruned.at(0)->asHybrid());
170  EXPECT_LONGS_EQUAL(1, pruned.at(0)->asHybrid()->nrComponents());
171  EXPECT(!pruned.equals(bayesNet));
172 
173  // error
174  const double error0 = chosen0.error(vv) + gc0->negLogConstant() -
175  px->negLogConstant() - log(0.4);
176  const double error1 = chosen1.error(vv) + gc1->negLogConstant() -
177  px->negLogConstant() - log(0.6);
178  // print errors:
179  EXPECT_DOUBLES_EQUAL(error0, bayesNet.error(zero), 1e-9);
180  EXPECT_DOUBLES_EQUAL(error1, bayesNet.error(one), 1e-9);
181  EXPECT_DOUBLES_EQUAL(error0 + logP0, error1 + logP1, 1e-9);
182 
183  // errorTree
184  AlgebraicDecisionTree<Key> expected(M(0), error0, error1);
186 
187  // discretePosterior
188  // We have: P(z|x,mode)P(x)P(mode). When we condition on z and x, we get
189  // P(mode|z,x) \propto P(z|x,mode)P(x)P(mode)
190  // Normalizing this yields posterior P(mode|z,x) = {0.8, 0.2}
191  double q0 = std::exp(logP0), q1 = std::exp(logP1), sum = q0 + q1;
192  AlgebraicDecisionTree<Key> expectedPosterior(M(0), q0 / sum, q1 / sum);
193  EXPECT(assert_equal(expectedPosterior, bayesNet.discretePosterior(vv)));
194 
195  // toFactorGraph
196  auto fg = bayesNet.toFactorGraph({{Z(0), Vector1(5.0)}});
197  EXPECT_LONGS_EQUAL(3, fg.size());
198 
199  // Create the product factor for eliminating x0:
200  HybridGaussianFactorGraph factors_x0;
201  factors_x0.push_back(fg.at(0));
202  factors_x0.push_back(fg.at(1));
203  auto productFactor = factors_x0.collectProductFactor();
204 
205  // Check that scalars are 0 and 1.79 (regression)
206  EXPECT_DOUBLES_EQUAL(0.0, productFactor({{M(0), 0}}).second, 1e-9);
207  EXPECT_DOUBLES_EQUAL(1.791759, productFactor({{M(0), 1}}).second, 1e-5);
208 
209  // Call eliminate and check scalar:
210  auto result = factors_x0.eliminate({X(0)});
211  auto df = std::dynamic_pointer_cast<DecisionTreeFactor>(result.second);
212 
213  // Check that the ratio of probPrime to evaluate is the same for all modes.
214  std::vector<double> ratio(2);
215  ratio[0] = std::exp(-fg.error(zero)) / bayesNet.evaluate(zero);
216  ratio[1] = std::exp(-fg.error(one)) / bayesNet.evaluate(one);
217  EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8);
218 
219  // Better and more general test:
220  // Since ϕ(M, x) \propto P(M,x|z) the discretePosteriors should agree
221  q0 = std::exp(-fg.error(zero));
222  q1 = std::exp(-fg.error(one));
223  sum = q0 + q1;
224  EXPECT(assert_equal(expectedPosterior, {M(0), q0 / sum, q1 / sum}));
225  VectorValues xv{{X(0), Vector1(5.0)}};
226  auto fgPosterior = fg.discretePosterior(xv);
227  EXPECT(assert_equal(expectedPosterior, fgPosterior));
228 }
229 
230 /* ****************************************************************************/
231 // Hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
232 namespace different_sigmas {
233 const auto gc = GaussianConditional::sharedMeanAndStddev(X(0), 2 * I_1x1, X(1),
234  Vector1(-4.0), 5.0);
235 
236 const std::vector<std::pair<Vector, double>> parms{{Vector1(5), 2.0},
237  {Vector1(2), 3.0}};
238 const auto hgc = std::make_shared<HybridGaussianConditional>(Asia, X(1), parms);
239 
240 const auto prior = std::make_shared<DiscreteConditional>(Asia, "99/1");
241 auto wrap = [](const auto& c) {
242  return std::make_shared<HybridConditional>(c);
243 };
245 
246 // Create values at which to evaluate.
247 HybridValues values{{{X(0), Vector1(-6)}, {X(1), Vector1(1)}}, {{asiaKey, 0}}};
248 } // namespace different_sigmas
249 
250 /* ****************************************************************************/
251 // Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
252 TEST(HybridBayesNet, evaluateHybrid) {
253  using namespace different_sigmas;
254 
255  const double conditionalProbability = gc->evaluate(values.continuous());
256  const double mixtureProbability = hgc->evaluate(values);
257  EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99,
258  bayesNet.evaluate(values), 1e-9);
259 }
260 
261 /* ****************************************************************************/
262 // Test choosing an assignment of conditionals
264  Switching s(4);
265 
266  const Ordering ordering(s.linearizationPoint.keys());
267 
268  const auto [hybridBayesNet, remainingFactorGraph] =
269  s.linearizedFactorGraph().eliminatePartialSequential(ordering);
270 
271  DiscreteValues assignment;
272  assignment[M(0)] = 1;
273  assignment[M(1)] = 1;
274  assignment[M(2)] = 0;
275 
276  GaussianBayesNet gbn = hybridBayesNet->choose(assignment);
277 
279 
280  EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asHybrid())(assignment),
281  *gbn.at(0)));
282  EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asHybrid())(assignment),
283  *gbn.at(1)));
284  EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asHybrid())(assignment),
285  *gbn.at(2)));
286  EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asHybrid())(assignment),
287  *gbn.at(3)));
288 }
289 
290 /* ****************************************************************************/
291 // Test Bayes net optimize
292 TEST(HybridBayesNet, OptimizeAssignment) {
293  Switching s(4);
294 
295  const Ordering ordering(s.linearizationPoint.keys());
296 
297  const auto [hybridBayesNet, remainingFactorGraph] =
298  s.linearizedFactorGraph().eliminatePartialSequential(ordering);
299 
300  DiscreteValues assignment;
301  assignment[M(0)] = 1;
302  assignment[M(1)] = 1;
303  assignment[M(2)] = 1;
304 
305  VectorValues delta = hybridBayesNet->optimize(assignment);
306 
307  // The linearization point has the same value as the key index,
308  // e.g. X(0) = 1, X(1) = 2,
309  // but the factors specify X(k) = k-1, so delta should be -1.
310  VectorValues expected_delta;
311  expected_delta.insert(make_pair(X(0), -Vector1::Ones()));
312  expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
313  expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
314  expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
315 
316  EXPECT(assert_equal(expected_delta, delta));
317 }
318 
319 /* ****************************************************************************/
320 // Test Bayes net optimize
321 TEST(HybridBayesNet, Optimize) {
322  Switching s(4, 1.0, 0.1, {0, 1, 2, 3}, "1/1 1/1");
323 
324  HybridBayesNet::shared_ptr hybridBayesNet =
325  s.linearizedFactorGraph().eliminateSequential();
326 
327  HybridValues delta = hybridBayesNet->optimize();
328 
329  // NOTE: The true assignment is 111, but the discrete priors cause 101
330  DiscreteValues expectedAssignment;
331  expectedAssignment[M(0)] = 1;
332  expectedAssignment[M(1)] = 1;
333  expectedAssignment[M(2)] = 1;
334  EXPECT(assert_equal(expectedAssignment, delta.discrete()));
335 
336  VectorValues expectedValues;
337  expectedValues.insert(X(0), -Vector1::Ones());
338  expectedValues.insert(X(1), -Vector1::Ones());
339  expectedValues.insert(X(2), -Vector1::Ones());
340  expectedValues.insert(X(3), -Vector1::Ones());
341 
342  EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
343 }
344 
345 /* ****************************************************************************/
// Shared fixture for the error / log-probability / pruning tests below.
namespace hbn_error {
// Create switching network with three continuous variables and two discrete:
// ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x0;z0) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1)
Switching s(3);

// The true discrete assignment (both modes equal to 1).
const DiscreteValues discrete_values{{M(0), 1}, {M(1), 1}};

}  // namespace hbn_error
355 
356 /* ****************************************************************************/
357 // Test Bayes net error and log-probability
359  using namespace hbn_error;
360 
361  HybridBayesNet::shared_ptr posterior =
362  s.linearizedFactorGraph().eliminateSequential();
363  EXPECT_LONGS_EQUAL(5, posterior->size());
364 
365  // Optimize
366  HybridValues delta = posterior->optimize();
367 
368  // Verify discrete posterior at optimal value sums to 1.
369  auto discretePosterior = posterior->discretePosterior(delta.continuous());
370  EXPECT_DOUBLES_EQUAL(1.0, discretePosterior.sum(), 1e-9);
371 
372  // Regression test on discrete posterior at optimal value.
373  std::vector<double> leaves = {0.095516068, 0.31800092, 0.27798511, 0.3084979};
374  AlgebraicDecisionTree<Key> expected(s.modes, leaves);
375  EXPECT(assert_equal(expected, discretePosterior, 1e-6));
376 
377  // Verify logProbability computation and check specific logProbability value
378  const HybridValues hybridValues{delta.continuous(), discrete_values};
379  double logProbability = 0;
380  logProbability += posterior->at(0)->asHybrid()->logProbability(hybridValues);
381  logProbability += posterior->at(1)->asHybrid()->logProbability(hybridValues);
382  logProbability += posterior->at(2)->asHybrid()->logProbability(hybridValues);
383  logProbability +=
384  posterior->at(3)->asDiscrete()->logProbability(hybridValues);
385  logProbability +=
386  posterior->at(4)->asDiscrete()->logProbability(hybridValues);
387  EXPECT_DOUBLES_EQUAL(logProbability, posterior->logProbability(hybridValues),
388  1e-9);
389 
390  double negLogConstant = posterior->negLogConstant(discrete_values);
391 
392  // The sum of all the mode densities
393  double normalizer =
394  AlgebraicDecisionTree<Key>(posterior->errorTree(delta.continuous()),
395  [](double error) { return exp(-error); })
396  .sum();
397 
398  // Check agreement with discrete posterior
399  double density = exp(logProbability + negLogConstant) / normalizer;
400  EXPECT_DOUBLES_EQUAL(density, discretePosterior(discrete_values), 1e-6);
401 }
402 
403 /* ****************************************************************************/
404 // Test Bayes net pruning
406  Switching s(3);
407 
408  HybridBayesNet::shared_ptr posterior =
409  s.linearizedFactorGraph().eliminateSequential();
410  EXPECT_LONGS_EQUAL(5, posterior->size());
411 
412  // Call Max-Product to get MAP
413  HybridValues delta = posterior->optimize();
414 
415  // Prune the Bayes net
416  auto prunedBayesNet = posterior->prune(2);
417 
418  // Test if Max-Product gives the same result as unpruned version
419  HybridValues pruned_delta = prunedBayesNet.optimize();
420  EXPECT(assert_equal(delta.discrete(), pruned_delta.discrete()));
421  EXPECT(assert_equal(delta.continuous(), pruned_delta.continuous()));
422 }
423 
424 /* ****************************************************************************/
425 // Test Bayes net pruning and dead node removal
426 TEST(HybridBayesNet, RemoveDeadNodes) {
427  Switching s(3);
428 
429  HybridBayesNet::shared_ptr posterior =
430  s.linearizedFactorGraph().eliminateSequential();
431  EXPECT_LONGS_EQUAL(5, posterior->size());
432 
433  // Call Max-Product to get MAP
434  HybridValues delta = posterior->optimize();
435 
436  // Prune the Bayes net
437  const double pruneDeadVariables = 0.99;
438  auto prunedBayesNet = posterior->prune(2, pruneDeadVariables);
439 
440  // First conditional is still the same: P( x0 | x1 m0)
441  EXPECT(prunedBayesNet.at(0)->isHybrid());
442 
443  // Check that hybrid conditional that only depend on M1
444  // is now Gaussian and not Hybrid
445  EXPECT(prunedBayesNet.at(1)->isContinuous());
446 
447  // Third conditional is still Hybrid: P( x1 | m0 m1) -> P( x1 | m0)
448  EXPECT(prunedBayesNet.at(0)->isHybrid());
449 
450  // Check that discrete joint only has M0 and not (M0, M1)
451  // since M0 is removed
452  auto joint = prunedBayesNet.at(3)->asDiscrete();
453  EXPECT(joint);
454  EXPECT(joint->keys() == KeyVector{M(0)});
455 }
456 
457 /* ****************************************************************************/
458 // Test Bayes net error and log-probability after pruning
459 TEST(HybridBayesNet, ErrorAfterPruning) {
460  using namespace hbn_error;
461 
462  HybridBayesNet::shared_ptr posterior =
463  s.linearizedFactorGraph().eliminateSequential();
464  EXPECT_LONGS_EQUAL(5, posterior->size());
465 
466  // Optimize
467  HybridValues delta = posterior->optimize();
468 
469  // Prune and get probabilities
470  HybridBayesNet prunedBayesNet = posterior->prune(2);
471  AlgebraicDecisionTree<Key> prunedTree =
472  prunedBayesNet.discretePosterior(delta.continuous());
473 
474  // Regression test on pruned probability tree
475  std::vector<double> pruned_leaves = {0.0, 0.50758422, 0.0, 0.49241578};
476  AlgebraicDecisionTree<Key> expected_pruned(s.modes, pruned_leaves);
477  EXPECT(assert_equal(expected_pruned, prunedTree, 1e-6));
478 
479  // Regression to check specific logProbability value
480  const HybridValues hybridValues{delta.continuous(), discrete_values};
481  double pruned_logProbability = 0;
482  pruned_logProbability +=
483  prunedBayesNet.at(0)->asHybrid()->logProbability(hybridValues);
484  pruned_logProbability +=
485  prunedBayesNet.at(1)->asHybrid()->logProbability(hybridValues);
486  pruned_logProbability +=
487  prunedBayesNet.at(2)->asHybrid()->logProbability(hybridValues);
488  pruned_logProbability +=
489  prunedBayesNet.at(3)->asDiscrete()->logProbability(hybridValues);
490 
491  double pruned_negLogConstant = prunedBayesNet.negLogConstant(discrete_values);
492 
493  // The sum of all the mode densities
494  double pruned_normalizer =
495  AlgebraicDecisionTree<Key>(prunedBayesNet.errorTree(delta.continuous()),
496  [](double error) { return exp(-error); })
497  .sum();
498  double pruned_density =
499  exp(pruned_logProbability + pruned_negLogConstant) / pruned_normalizer;
500  EXPECT_DOUBLES_EQUAL(pruned_density, prunedTree(discrete_values), 1e-9);
501 }
502 
503 /* ****************************************************************************/
504 // Test Bayes net updateDiscreteConditionals
TEST(HybridBayesNet, UpdateDiscreteConditionals) {
  Switching s(4);

  HybridBayesNet::shared_ptr posterior =
      s.linearizedFactorGraph().eliminateSequential();
  EXPECT_LONGS_EQUAL(7, posterior->size());

  // Multiply all discrete conditionals into one joint over the modes.
  DiscreteConditional joint;
  for (auto&& conditional : posterior->discreteMarginal()) {
    joint = joint * (*conditional);
  }

  // Prune a copy of the joint down to 3 leaves for later comparison.
  size_t maxNrLeaves = 3;
  DiscreteConditional prunedDecisionTree(joint);
  prunedDecisionTree.prune(maxNrLeaves);

#ifdef GTSAM_DT_MERGING
  // With merging enabled, identical zero leaves collapse, so the pruned
  // tree has maxNrLeaves surviving leaves plus two merged zero leaves.
  EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/,
                     prunedDecisionTree.nrLeaves());
#else
  // Without merging, the full 2^3 = 8 leaf tree is retained.
  EXPECT_LONGS_EQUAL(8 /*full tree*/, prunedDecisionTree.nrLeaves());
#endif

  // regression
  // NOTE(Frank): I had to include *three* non-zeroes here now.
  DecisionTreeFactor::ADT potentials(
      s.modes,
      std::vector<double>{0, 0, 0, 0.28739288, 0, 0.43106901, 0, 0.2815381});
  DiscreteConditional expectedConditional(3, s.modes, potentials);

  // Prune!
  auto pruned = posterior->prune(maxNrLeaves);

  // Functor to verify values against the expectedConditional;
  // invoked once per leaf via DecisionTree::apply below.
  auto checker = [&](const Assignment<Key>& assignment,
                     double probability) -> double {
    // typecast so we can use this to get probability value
    DiscreteValues choices(assignment);
    if (prunedDecisionTree(choices) == 0) {
      // Pruned-away assignments must have exactly zero probability.
      EXPECT_DOUBLES_EQUAL(0.0, probability, 1e-9);
    } else {
      EXPECT_DOUBLES_EQUAL(expectedConditional(choices), probability, 1e-6);
    }
    return 0.0;
  };

  // Get the pruned discrete conditionals as an AlgebraicDecisionTree
  CHECK(pruned.at(4)->asDiscrete());
  auto pruned_discrete_conditionals = pruned.at(4)->asDiscrete();
  auto discrete_conditional_tree =
      std::dynamic_pointer_cast<DecisionTreeFactor::ADT>(
          pruned_discrete_conditionals);

  // The checker functor verifies the values for us.
  discrete_conditional_tree->apply(checker);
}
561 
562 /* ****************************************************************************/
563 // Test HybridBayesNet sampling.
564 TEST(HybridBayesNet, Sampling) {
566 
567  auto noise_model = noiseModel::Diagonal::Sigmas(Vector1(1.0));
569 
570  auto zero_motion =
571  std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
572  auto one_motion =
573  std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
575  DiscreteKey(M(0), 2),
576  std::vector<NoiseModelFactor::shared_ptr>{zero_motion, one_motion});
577 
578  DiscreteKey mode(M(0), 2);
580 
581  Values initial;
582  double z0 = 0.0, z1 = 1.0;
583  initial.insert<double>(X(0), z0);
584  initial.insert<double>(X(1), z1);
585 
586  // Create the factor graph from the nonlinear factor graph.
588  // Eliminate into BN
589  HybridBayesNet::shared_ptr bn = fg->eliminateSequential();
590 
591  // Set up sampling
592  std::mt19937_64 gen(11);
593 
594  // Initialize containers for computing the mean values.
595  vector<double> discrete_samples;
596  VectorValues average_continuous;
597 
598  size_t num_samples = 1000;
599  for (size_t i = 0; i < num_samples; i++) {
600  // Sample
601  HybridValues sample = bn->sample(&gen);
602 
603  discrete_samples.push_back(sample.discrete().at(M(0)));
604 
605  if (i == 0) {
606  average_continuous.insert(sample.continuous());
607  } else {
608  average_continuous += sample.continuous();
609  }
610  }
611 
612  EXPECT_LONGS_EQUAL(2, average_continuous.size());
613  EXPECT_LONGS_EQUAL(num_samples, discrete_samples.size());
614 
615  // regression for specific RNG seed
616  double discrete_sum =
617  std::accumulate(discrete_samples.begin(), discrete_samples.end(),
618  decltype(discrete_samples)::value_type(0));
619  EXPECT_DOUBLES_EQUAL(0.477, discrete_sum / num_samples, 1e-9);
620 
622  // regression for specific RNG seed
623 #if __APPLE__ || _WIN32
624  expected.insert({X(0), Vector1(-0.0131207162712)});
625  expected.insert({X(1), Vector1(-0.499026377568)});
626 #elif __linux__
627  expected.insert({X(0), Vector1(-0.00799425182219)});
628  expected.insert({X(1), Vector1(-0.526463854268)});
629 #endif
630 
631  EXPECT(assert_equal(expected, average_continuous.scale(1.0 / num_samples)));
632 }
633 
634 /* ****************************************************************************/
635 // Test hybrid gaussian factor graph errorTree when
636 // there is a HybridConditional in the graph
637 TEST(HybridBayesNet, ErrorTreeWithConditional) {
638  using symbol_shorthand::F;
639 
640  Key z0 = Z(0), f01 = F(0);
641  Key x0 = X(0), x1 = X(1);
642 
643  HybridBayesNet hbn;
644 
645  auto prior_model = noiseModel::Isotropic::Sigma(1, 1e-1);
646  auto measurement_model = noiseModel::Isotropic::Sigma(1, 2.0);
647 
648  // Set a prior P(x0) at x0=0
649  hbn.emplace_shared<GaussianConditional>(x0, Vector1(0.0), I_1x1, prior_model);
650 
651  // Add measurement P(z0 | x0)
652  hbn.emplace_shared<GaussianConditional>(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
653  measurement_model);
654 
655  // Add hybrid motion model
656  double mu = 0.0;
657  double sigma0 = 1e2, sigma1 = 1e-2;
658  auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
659  auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
660  auto c0 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
661  x0, -I_1x1, model0),
662  c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
663  x0, -I_1x1, model1);
664  DiscreteKey m1(M(2), 2);
665  hbn.emplace_shared<HybridGaussianConditional>(m1, std::vector{c0, c1});
666 
667  // Discrete uniform prior.
668  hbn.emplace_shared<DiscreteConditional>(m1, "0.5/0.5");
669 
670  VectorValues given;
671  given.insert(z0, Vector1(0.0));
672  given.insert(f01, Vector1(0.0));
673  auto gfg = hbn.toFactorGraph(given);
674 
676  vv.insert(x0, Vector1(1.0));
677  vv.insert(x1, Vector1(2.0));
678  AlgebraicDecisionTree<Key> errorTree = gfg.errorTree(vv);
679 
680  // regression
681  AlgebraicDecisionTree<Key> expected(m1, 60.028538, 5050.8181);
682  EXPECT(assert_equal(expected, errorTree, 1e-4));
683 }
684 
685 /* ************************************************************************* */
// Run all registered CppUnitLite tests; returns nonzero on failure.
int main() {
  TestResult tr;
  return TestRegistry::runAllTests(tr);
}
690 /* ************************************************************************* */
TestRegistry::runAllTests
static int runAllTests(TestResult &result)
Definition: TestRegistry.cpp:27
different_sigmas::parms
const std::vector< std::pair< Vector, double > > parms
Definition: testHybridBayesNet.cpp:236
gtsam::HybridBayesNet::equals
bool equals(const This &fg, double tol=1e-9) const
GTSAM-style equals.
Definition: HybridBayesNet.cpp:40
gtsam::HybridGaussianFactorGraph::shared_ptr
std::shared_ptr< This > shared_ptr
shared_ptr to This
Definition: HybridGaussianFactorGraph.h:120
TableDistribution.h
simple_graph::sigma1
double sigma1
Definition: testJacobianFactor.cpp:193
hbn_error
Definition: testHybridBayesNet.cpp:346
gtsam::HybridValues
Definition: HybridValues.h:37
rng
static std::mt19937 rng
Definition: timeFactorOverhead.cpp:31
Asia
static const DiscreteKey Asia(asiaKey, 2)
gtsam::HybridGaussianConditional::shared_ptr
std::shared_ptr< This > shared_ptr
Definition: HybridGaussianConditional.h:60
s
RealScalar s
Definition: level1_cplx_impl.h:126
e
Array< double, 1, 3 > e(1./3., 0.5, 2.)
gtsam::FactorGraph::error
double error(const HybridValues &values) const
Definition: FactorGraph-inst.h:66
EXPECT_LONGS_EQUAL
#define EXPECT_LONGS_EQUAL(expected, actual)
Definition: Test.h:154
Testable.h
Concept check for values that can be used in unit tests.
EXPECT
#define EXPECT(condition)
Definition: Test.h:150
TestHarness.h
gtsam::HybridBayesNet
Definition: HybridBayesNet.h:37
gtsam::HybridNonlinearFactorGraph
Definition: HybridNonlinearFactorGraph.h:33
mu
double mu
Definition: testBoundingConstraint.cpp:37
gtsam::HybridBayesNet::evaluate
double evaluate(const HybridValues &values) const
Evaluate hybrid probability density for given HybridValues.
Definition: HybridBayesNet.cpp:264
gtsam::VectorValues::insert
iterator insert(const std::pair< Key, Vector > &key_value)
Definition: VectorValues.cpp:90
c
Scalar Scalar * c
Definition: benchVecAdd.cpp:17
z1
Point2 z1
Definition: testTriangulationFactor.cpp:45
gtsam::DiscreteDistribution
Definition: DiscreteDistribution.h:33
initial
Values initial
Definition: OdometryOptimize.cpp:2
Switching.h
hbn_error::discrete_values
const DiscreteValues discrete_values
Definition: testHybridBayesNet.cpp:352
wrap
mxArray * wrap(const Class &value)
Definition: matlab.h:126
gtsam::PriorFactor
Definition: nonlinear/PriorFactor.h:30
m1
Matrix3d m1
Definition: IOFormat.cpp:2
HybridBayesNet.h
A Bayes net of Gaussian Conditionals indexed by discrete keys.
different_sigmas::values
HybridValues values
Definition: testHybridBayesNet.cpp:247
log
const EIGEN_DEVICE_FUNC LogReturnType log() const
Definition: ArrayCwiseUnaryOps.h:128
X
#define X
Definition: icosphere.cpp:20
vv
static const VectorValues vv
Definition: testHybridGaussianConditional.cpp:45
exp
const EIGEN_DEVICE_FUNC ExpReturnType exp() const
Definition: ArrayCwiseUnaryOps.h:97
gtsam::KeyVector
FastVector< Key > KeyVector
Define collection type once and for all - also used in wrappers.
Definition: Key.h:92
result
Values result
Definition: OdometryOptimize.cpp:8
main
int main()
Definition: testHybridBayesNet.cpp:686
test_motion::noise_model
auto noise_model
Definition: testHybridNonlinearFactorGraph.cpp:120
gtsam::HybridGaussianFactorGraph::collectProductFactor
HybridGaussianProductFactor collectProductFactor() const
Create a decision tree of factor graphs out of this hybrid factor graph.
Definition: HybridGaussianFactorGraph.cpp:180
gtsam::HybridValues::continuous
const VectorValues & continuous() const
Return the multi-dimensional vector values.
Definition: HybridValues.cpp:54
gtsam::AlgebraicDecisionTree< Key >
c1
static double c1
Definition: airy.c:54
gtsam::Switching
ϕ(X(0)) .. ϕ(X(k),X(k+1)) .. ϕ(X(k);z_k) .. ϕ(M(0)) .. ϕ(M(K-3),M(K-2))
Definition: Switching.h:124
gtsam::FactorGraph::at
const sharedFactor at(size_t i) const
Definition: FactorGraph.h:306
gtsam::HybridBayesNet::optimize
HybridValues optimize() const
Solve the HybridBayesNet by first computing the MPE of all the discrete variables and then optimizing...
Definition: HybridBayesNet.cpp:162
Vector1
Eigen::Matrix< double, 1, 1 > Vector1
Definition: testEvent.cpp:33
different_sigmas::bayesNet
const HybridBayesNet bayesNet
Definition: testHybridBayesNet.cpp:244
gtsam.examples.PlanarManipulatorExample.delta
def delta(g0, g1)
Definition: PlanarManipulatorExample.py:45
gtsam::VectorValues
Definition: VectorValues.h:74
x1
Pose3 x1
Definition: testPose3.cpp:663
mode
static const DiscreteKey mode(modeKey, 2)
gtsam::HybridBayesNet::errorTree
AlgebraicDecisionTree< Key > errorTree(const VectorValues &continuousValues) const
Compute the negative log posterior log P'(M|x) of all assignments up to a constant,...
Definition: HybridBayesNet.cpp:218
DiscreteFactor.h
gtsam::VectorValues::scale
VectorValues scale(const double a) const
Definition: VectorValues.cpp:369
TinyHybridExample.h
gtsam::GaussianConditional
Definition: GaussianConditional.h:40
zero
EIGEN_DONT_INLINE Scalar zero()
Definition: svd_common.h:296
cholesky::expected
Matrix expected
Definition: testMatrix.cpp:971
gtsam::HybridGaussianConditional
A conditional of gaussian conditionals indexed by discrete variables, as part of a Bayes Network....
Definition: HybridGaussianConditional.h:55
x0
static Symbol x0('x', 0)
gtsam::HybridGaussianFactorGraph::eliminate
std::pair< std::shared_ptr< HybridConditional >, std::shared_ptr< Factor > > eliminate(const Ordering &keys) const
Eliminate the given continuous keys.
Definition: HybridGaussianFactorGraph.cpp:449
gtsam::HybridGaussianFactorGraph
Definition: HybridGaussianFactorGraph.h:106
density
Definition: testGaussianConditional.cpp:127
gtsam::Assignment< Key >
TEST
TEST(HybridBayesNet, Creation)
Definition: testHybridBayesNet.cpp:51
HybridBayesTree.h
Hybrid Bayes Tree, the result of eliminating a HybridJunctionTree.
sampling::gbn
static const GaussianBayesNet gbn
Definition: testGaussianBayesNet.cpp:171
gtsam::symbol_shorthand::F
Key F(std::uint64_t j)
Definition: inference/Symbol.h:153
EXPECT_DOUBLES_EQUAL
#define EXPECT_DOUBLES_EQUAL(expected, actual, threshold)
Definition: Test.h:161
ordering
static enum @1096 ordering
TestResult
Definition: TestResult.h:26
gtsam::AlgebraicDecisionTree::sum
double sum() const
Compute sum of all values.
Definition: AlgebraicDecisionTree.h:211
gtsam::HybridBayesNet::discretePosterior
AlgebraicDecisionTree< Key > discretePosterior(const VectorValues &continuousValues) const
Compute normalized posterior P(M|X=x) and return as a tree.
Definition: HybridBayesNet.cpp:255
gtsam::FactorGraph::size
size_t size() const
Definition: FactorGraph.h:297
gtsam::HybridBayesNet::push_back
void push_back(std::shared_ptr< HybridConditional > conditional)
Add a hybrid conditional using a shared_ptr.
Definition: HybridBayesNet.h:76
gtsam::DiscreteConditional
Definition: DiscreteConditional.h:37
different_sigmas::gc
const auto gc
Definition: testHybridBayesNet.cpp:233
HybridGaussianFactorGraph.h
Linearized Hybrid factor graph that uses type erasure.
empty
Definition: test_copy_move.cpp:19
gtsam
traits
Definition: SFMdata.h:40
error
static double error
Definition: testRot3.cpp:37
NonlinearFactorGraph.h
Factor Graph consisting of non-linear factors.
gtsam::DiscreteValues
Definition: DiscreteValues.h:34
gtsam::FactorGraph::push_back
IsDerived< DERIVEDFACTOR > push_back(std::shared_ptr< DERIVEDFACTOR > factor)
Add a factor directly using a shared_ptr.
Definition: FactorGraph.h:147
gtsam::Values
Definition: Values.h:65
CHECK
#define CHECK(condition)
Definition: Test.h:108
gtsam::HybridBayesNet::sample
HybridValues sample(const HybridValues &given, std::mt19937_64 *rng) const
Sample from an incomplete BayesNet, given missing variables.
Definition: HybridBayesNet.cpp:183
gtsam::DiscreteKey
std::pair< Key, size_t > DiscreteKey
Definition: DiscreteKey.h:38
std
Definition: BFloat16.h:88
gtsam::HybridValues::discrete
const DiscreteValues & discrete() const
Return the discrete values.
Definition: HybridValues.cpp:57
ratio
set noclip points set clip one set noclip two set bar set border lt lw set xdata set ydata set zdata set x2data set y2data set boxwidth set dummy y set format x g set format y g set format x2 g set format y2 g set format z g set angles radians set nogrid set key title set key left top Right noreverse box linetype linewidth samplen spacing width set nolabel set noarrow set nologscale set logscale x set set pointsize set encoding default set nopolar set noparametric set set set set surface set nocontour set clabel set mapping cartesian set nohidden3d set cntrparam order set cntrparam linear set cntrparam levels auto set cntrparam points set size ratio
Definition: gnuplot_common_settings.hh:44
gtsam::HybridNonlinearFactorGraph::linearize
std::shared_ptr< HybridGaussianFactorGraph > linearize(const Values &continuousValues) const
Linearize all the continuous factors in the HybridNonlinearFactorGraph.
Definition: HybridNonlinearFactorGraph.cpp:139
gtsam::GaussianConditional::shared_ptr
std::shared_ptr< This > shared_ptr
shared_ptr to this class
Definition: GaussianConditional.h:46
gtsam::VectorValues::size
size_t size() const
Definition: VectorValues.h:133
gtsam::assert_equal
bool assert_equal(const Matrix &expected, const Matrix &actual, double tol)
Definition: Matrix.cpp:41
gtsam::DecisionTree::nrLeaves
size_t nrLeaves() const
Return the number of leaves in the tree.
Definition: DecisionTree-inl.h:932
gtsam::HybridBayesNet::logProbability
double logProbability(const HybridValues &x) const
Definition: BayesNet-inst.h:94
initial
Definition: testScenarioRunner.cpp:148
different_sigmas::prior
const auto prior
Definition: testHybridBayesNet.cpp:240
gtsam::tiny::createHybridBayesNet
HybridBayesNet createHybridBayesNet(size_t num_measurements=1, bool manyModes=false)
Definition: TinyHybridExample.h:39
different_sigmas
Definition: testHybridBayesNet.cpp:232
different_sigmas::hgc
const auto hgc
Definition: testHybridBayesNet.cpp:238
gtsam::HybridBayesNet::shared_ptr
std::shared_ptr< HybridBayesNet > shared_ptr
Definition: HybridBayesNet.h:42
gtsam::HybridBayesNet::prune
HybridBayesNet prune(size_t maxNrLeaves, const std::optional< double > &marginalThreshold={}, DiscreteValues *fixedValues=nullptr) const
Prune the Bayes Net such that we have at most maxNrLeaves leaves.
Definition: HybridBayesNet.cpp:45
gtsam::HybridBayesNet::toFactorGraph
HybridGaussianFactorGraph toFactorGraph(const VectorValues &measurements) const
Definition: HybridBayesNet.cpp:269
model1
noiseModel::Isotropic::shared_ptr model1
Definition: testEssentialMatrixFactor.cpp:26
gtsam::Key
std::uint64_t Key
Integer nonlinear key type.
Definition: types.h:97
Z
#define Z
Definition: icosphere.cpp:21
gtsam::Ordering
Definition: inference/Ordering.h:33
i
int i
Definition: BiCGSTAB_step_by_step.cpp:9
gtsam::HybridNonlinearFactor
Implementation of a discrete-conditioned hybrid factor.
Definition: HybridNonlinearFactor.h:58
pybind_wrapper_test_script.other
other
Definition: pybind_wrapper_test_script.py:42
gtsam::GaussianBayesNet
Definition: GaussianBayesNet.h:35
gtsam::FactorGraph::emplace_shared
IsDerived< DERIVEDFACTOR > emplace_shared(Args &&... args)
Emplace a shared pointer to factor of given type.
Definition: FactorGraph.h:153
gtsam::DiscreteConditional::prune
virtual void prune(size_t maxNrAssignments)
Prune the conditional.
Definition: DiscreteConditional.cpp:496
asiaKey
static const Key asiaKey
Definition: testHybridBayesNet.cpp:46
gtsam::HybridBayesNet::choose
GaussianBayesNet choose(const DiscreteValues &assignment) const
Get the Gaussian Bayes net P(X|M=m) corresponding to a specific assignment m for the discrete variabl...
Definition: HybridBayesNet.cpp:121
gtsam::HybridBayesNet::emplace_shared
void emplace_shared(Args &&...args)
Definition: HybridBayesNet.h:116
HybridConditional.h
gtsam::HybridBayesNet::negLogConstant
double negLogConstant(const std::optional< DiscreteValues > &discrete) const
Get the negative log of the normalization constant corresponding to the joint density represented by ...
Definition: HybridBayesNet.cpp:231
px
RealScalar RealScalar * px
Definition: level1_cplx_impl.h:28
M
Matrix< RealScalar, Dynamic, Dynamic > M
Definition: bench_gemm.cpp:51


gtsam
Author(s):
autogenerated on Sun Feb 16 2025 04:06:28