testHybridBayesNet.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file    testHybridBayesNet.cpp
 * @brief   Unit tests for HybridBayesNet
 */

// GTSAM hybrid and factor headers (assumed from the symbols used below).
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>

#include "Switching.h"
#include "TinyHybridExample.h"

// Include for test suite
#include <CppUnitLite/TestHarness.h>

using namespace std;
using namespace gtsam;

// Symbol shorthand for mode (M), continuous state (X), and measurement (Z) keys.
using symbol_shorthand::M;
using symbol_shorthand::X;
using symbol_shorthand::Z;

static const Key asiaKey = 0;
static const DiscreteKey Asia(asiaKey, 2);

/* ****************************************************************************/
// Test creation of a pure discrete Bayes net.
TEST(HybridBayesNet, Creation) {
  HybridBayesNet bayesNet;
  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));

  DiscreteConditional expected(Asia, "99/1");
  CHECK(bayesNet.at(0)->asDiscrete());
  EXPECT(assert_equal(expected, *bayesNet.at(0)->asDiscrete()));
}

/* ****************************************************************************/
// Test adding a Bayes net to another one.
TEST(HybridBayesNet, Add) {
  HybridBayesNet bayesNet;
  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));

  HybridBayesNet other;
  other.add(bayesNet);
  EXPECT(bayesNet.equals(other));
}

/* ****************************************************************************/
// Test evaluate for a pure discrete Bayes net P(Asia).
TEST(HybridBayesNet, EvaluatePureDiscrete) {
  HybridBayesNet bayesNet;
  bayesNet.emplace_back(new DiscreteConditional(Asia, "4/6"));
  HybridValues values;
  values.insert(asiaKey, 0);
  EXPECT_DOUBLES_EQUAL(0.4, bayesNet.evaluate(values), 1e-9);
}

/* ****************************************************************************/
// Test creation of a tiny hybrid Bayes net.
TEST(HybridBayesNet, Tiny) {
  auto bn = tiny::createHybridBayesNet();
  EXPECT_LONGS_EQUAL(3, bn.size());

  const VectorValues vv{{Z(0), Vector1(5.0)}, {X(0), Vector1(5.0)}};
  auto fg = bn.toFactorGraph(vv);
  EXPECT_LONGS_EQUAL(3, fg.size());

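  // toFactorGraph(vv) plugs the measurements into the Bayes net, so the
  // resulting factor graph represents the same density up to a constant
  // normalization factor that does not depend on the mode.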
  // Check that the ratio of probPrime to evaluate is the same for all modes.
  std::vector<double> ratio(2);
  for (size_t mode : {0, 1}) {
    const HybridValues hv{vv, {{M(0), mode}}};
    ratio[mode] = std::exp(-fg.error(hv)) / bn.evaluate(hv);
  }
  EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8);
}

/* ****************************************************************************/
// Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
TEST(HybridBayesNet, evaluateHybrid) {
  const auto continuousConditional = GaussianConditional::sharedMeanAndStddev(
      X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0);

  const SharedDiagonal model0 = noiseModel::Diagonal::Sigmas(Vector1(2.0)),
                       model1 = noiseModel::Diagonal::Sigmas(Vector1(3.0));

  const auto conditional0 = std::make_shared<GaussianConditional>(
                 X(1), Vector1::Constant(5), I_1x1, model0),
             conditional1 = std::make_shared<GaussianConditional>(
                 X(1), Vector1::Constant(2), I_1x1, model1);

  // Create hybrid Bayes net.
  HybridBayesNet bayesNet;
  bayesNet.push_back(continuousConditional);
  bayesNet.emplace_back(
      new GaussianMixture({X(1)}, {}, {Asia}, {conditional0, conditional1}));
  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));

  // Create values at which to evaluate.
  HybridValues values;
  values.insert(asiaKey, 0);
  values.insert(X(0), Vector1(-6));
  values.insert(X(1), Vector1(1));

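  // With Asia == 0 the mixture selects conditional0, and the discrete prior
  // contributes a factor of 0.99, so the density is the product of three terms.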
  const double conditionalProbability =
      continuousConditional->evaluate(values.continuous());
  const double mixtureProbability = conditional0->evaluate(values.continuous());
  EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99,
                       bayesNet.evaluate(values), 1e-9);
}

/* ****************************************************************************/
// Test choosing an assignment of conditionals
TEST(HybridBayesNet, Choose) {
  Switching s(4);

  const Ordering ordering(s.linearizationPoint.keys());

  const auto [hybridBayesNet, remainingFactorGraph] =
      s.linearizedFactorGraph.eliminatePartialSequential(ordering);

  DiscreteValues assignment;
  assignment[M(0)] = 1;
  assignment[M(1)] = 1;
  assignment[M(2)] = 0;

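  // choose(assignment) fixes the discrete variables, returning the Gaussian
  // Bayes net made of the mixture components selected by that assignment.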
  GaussianBayesNet gbn = hybridBayesNet->choose(assignment);

  EXPECT_LONGS_EQUAL(4, gbn.size());

  EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asMixture())(assignment),
                      *gbn.at(0)));
  EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asMixture())(assignment),
                      *gbn.at(1)));
  EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asMixture())(assignment),
                      *gbn.at(2)));
  EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asMixture())(assignment),
                      *gbn.at(3)));
}

/* ****************************************************************************/
// Test Bayes net optimize
TEST(HybridBayesNet, OptimizeAssignment) {
  Switching s(4);

  const Ordering ordering(s.linearizationPoint.keys());

  const auto [hybridBayesNet, remainingFactorGraph] =
      s.linearizedFactorGraph.eliminatePartialSequential(ordering);

  DiscreteValues assignment;
  assignment[M(0)] = 1;
  assignment[M(1)] = 1;
  assignment[M(2)] = 1;

  VectorValues delta = hybridBayesNet->optimize(assignment);

  // The linearization point has the same value as the key index,
  // e.g. X(0) = 1, X(1) = 2,
  // but the factors specify X(k) = k-1, so delta should be -1.
  VectorValues expected_delta;
  expected_delta.insert(make_pair(X(0), -Vector1::Ones()));
  expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
  expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
  expected_delta.insert(make_pair(X(3), -Vector1::Ones()));

  EXPECT(assert_equal(expected_delta, delta));
}

/* ****************************************************************************/
// Test Bayes net optimize
TEST(HybridBayesNet, Optimize) {
  Switching s(4, 1.0, 0.1, {0, 1, 2, 3}, "1/1 1/1");

  HybridBayesNet::shared_ptr hybridBayesNet =
      s.linearizedFactorGraph.eliminateSequential();

  HybridValues delta = hybridBayesNet->optimize();

  // NOTE: With the uniform discrete priors ("1/1 1/1") used here, the optimal
  // assignment is the true assignment 111.
  DiscreteValues expectedAssignment;
  expectedAssignment[M(0)] = 1;
  expectedAssignment[M(1)] = 1;
  expectedAssignment[M(2)] = 1;
  EXPECT(assert_equal(expectedAssignment, delta.discrete()));

  VectorValues expectedValues;
  expectedValues.insert(X(0), -Vector1::Ones());
  expectedValues.insert(X(1), -Vector1::Ones());
  expectedValues.insert(X(2), -Vector1::Ones());
  expectedValues.insert(X(3), -Vector1::Ones());

  EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
}

/* ****************************************************************************/
// Test Bayes net pruning and density evaluation
TEST(HybridBayesNet, Pruning) {
  Switching s(3);

  HybridBayesNet::shared_ptr posterior =
      s.linearizedFactorGraph.eliminateSequential();
  EXPECT_LONGS_EQUAL(5, posterior->size());

  HybridValues delta = posterior->optimize();
  auto actualTree = posterior->evaluate(delta.continuous());

  // Regression test on density tree.
  std::vector<DiscreteKey> discrete_keys = {{M(0), 2}, {M(1), 2}};
  std::vector<double> leaves = {6.1112424, 20.346113, 17.785849, 19.738098};
  AlgebraicDecisionTree<Key> expected(discrete_keys, leaves);
  EXPECT(assert_equal(expected, actualTree, 1e-6));

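  // prune(2) keeps only the two most probable discrete leaves; pruned-away
  // assignments evaluate to zero density below.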
  // Prune and get probabilities
  auto prunedBayesNet = posterior->prune(2);
  auto prunedTree = prunedBayesNet.evaluate(delta.continuous());

  // Regression test on pruned density tree
  std::vector<double> pruned_leaves = {0.0, 20.346113, 0.0, 19.738098};
  AlgebraicDecisionTree<Key> expected_pruned(discrete_keys, pruned_leaves);
  EXPECT(assert_equal(expected_pruned, prunedTree, 1e-6));

  // Verify logProbability computation and check specific logProbability value
  const DiscreteValues discrete_values{{M(0), 1}, {M(1), 1}};
  const HybridValues hybridValues{delta.continuous(), discrete_values};
  double logProbability = 0;
  logProbability += posterior->at(0)->asMixture()->logProbability(hybridValues);
  logProbability += posterior->at(1)->asMixture()->logProbability(hybridValues);
  logProbability += posterior->at(2)->asMixture()->logProbability(hybridValues);
  // NOTE(dellaert): the discrete errors were not added in logProbability tree!
  logProbability +=
      posterior->at(3)->asDiscrete()->logProbability(hybridValues);
  logProbability +=
      posterior->at(4)->asDiscrete()->logProbability(hybridValues);

  double density = exp(logProbability);
  EXPECT_DOUBLES_EQUAL(density, actualTree(discrete_values), 1e-9);
  EXPECT_DOUBLES_EQUAL(density, prunedTree(discrete_values), 1e-9);
  EXPECT_DOUBLES_EQUAL(logProbability, posterior->logProbability(hybridValues),
                       1e-9);
}

/* ****************************************************************************/
// Test Bayes net pruning
TEST(HybridBayesNet, Prune) {
  Switching s(4);

  HybridBayesNet::shared_ptr posterior =
      s.linearizedFactorGraph.eliminateSequential();
  EXPECT_LONGS_EQUAL(7, posterior->size());

  HybridValues delta = posterior->optimize();

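  // Pruning keeps the most probable leaves, so optimizing the pruned Bayes net
  // should reproduce the original solution.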
  auto prunedBayesNet = posterior->prune(2);
  HybridValues pruned_delta = prunedBayesNet.optimize();

  EXPECT(assert_equal(delta.discrete(), pruned_delta.discrete()));
  EXPECT(assert_equal(delta.continuous(), pruned_delta.continuous()));
}

/* ****************************************************************************/
// Test Bayes net updateDiscreteConditionals
TEST(HybridBayesNet, UpdateDiscreteConditionals) {
  Switching s(4);

  HybridBayesNet::shared_ptr posterior =
      s.linearizedFactorGraph.eliminateSequential();
  EXPECT_LONGS_EQUAL(7, posterior->size());

  size_t maxNrLeaves = 3;
  auto discreteConditionals = posterior->discreteConditionals();
  const DecisionTreeFactor::shared_ptr prunedDecisionTree =
      std::make_shared<DecisionTreeFactor>(
          discreteConditionals->prune(maxNrLeaves));

  EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/,
                     prunedDecisionTree->nrLeaves());

  auto original_discrete_conditionals = *(posterior->at(4)->asDiscrete());

  // Prune!
  posterior->prune(maxNrLeaves);

  // Functor to verify values against the original_discrete_conditionals
  auto checker = [&](const Assignment<Key>& assignment,
                     double probability) -> double {
    // typecast so we can use this to get probability value
    DiscreteValues choices(assignment);
    if (prunedDecisionTree->operator()(choices) == 0) {
      EXPECT_DOUBLES_EQUAL(0.0, probability, 1e-9);
    } else {
      EXPECT_DOUBLES_EQUAL(original_discrete_conditionals(choices), probability,
                           1e-9);
    }
    return 0.0;
  };

  // Get the pruned discrete conditionals as an AlgebraicDecisionTree
  auto pruned_discrete_conditionals = posterior->at(4)->asDiscrete();
  auto discrete_conditional_tree =
      std::dynamic_pointer_cast<DecisionTreeFactor::ADT>(
          pruned_discrete_conditionals);

  // The checker functor verifies the values for us.
  discrete_conditional_tree->apply(checker);
}
323 /* ****************************************************************************/
324 // Test HybridBayesNet sampling.
325 TEST(HybridBayesNet, Sampling) {
327 
328  auto noise_model = noiseModel::Diagonal::Sigmas(Vector1(1.0));
329  auto zero_motion =
330  std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
331  auto one_motion =
332  std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
333  std::vector<NonlinearFactor::shared_ptr> factors = {zero_motion, one_motion};
334  nfg.emplace_shared<PriorFactor<double>>(X(0), 0.0, noise_model);
336  KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors);
337 
338  DiscreteKey mode(M(0), 2);
340 
341  Values initial;
342  double z0 = 0.0, z1 = 1.0;
343  initial.insert<double>(X(0), z0);
344  initial.insert<double>(X(1), z1);
345 
346  // Create the factor graph from the nonlinear factor graph.
348  // Eliminate into BN
349  HybridBayesNet::shared_ptr bn = fg->eliminateSequential();
350 
351  // Set up sampling
352  std::mt19937_64 gen(11);
353 
354  // Initialize containers for computing the mean values.
355  vector<double> discrete_samples;
356  VectorValues average_continuous;
357 
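  // Each sample() call draws the discrete mode and then the continuous values
  // from the Gaussian conditionals selected by that mode.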
  size_t num_samples = 1000;
  for (size_t i = 0; i < num_samples; i++) {
    // Sample
    HybridValues sample = bn->sample(&gen);

    discrete_samples.push_back(sample.discrete().at(M(0)));

    if (i == 0) {
      average_continuous.insert(sample.continuous());
    } else {
      average_continuous += sample.continuous();
    }
  }

  EXPECT_LONGS_EQUAL(2, average_continuous.size());
  EXPECT_LONGS_EQUAL(num_samples, discrete_samples.size());

  // Regressions don't work across platforms :-(
  // // regression for specific RNG seed
  // double discrete_sum =
  //     std::accumulate(discrete_samples.begin(), discrete_samples.end(),
  //                     decltype(discrete_samples)::value_type(0));
  // EXPECT_DOUBLES_EQUAL(0.477, discrete_sum / num_samples, 1e-9);

  // VectorValues expected;
  // expected.insert({X(0), Vector1(-0.0131207162712)});
  // expected.insert({X(1), Vector1(-0.499026377568)});
  // // regression for specific RNG seed
  // EXPECT(assert_equal(expected, average_continuous.scale(1.0 /
  // num_samples)));
}
389 
390 /* ************************************************************************* */
391 int main() {
392  TestResult tr;
393  return TestRegistry::runAllTests(tr);
394 }
395 /* ************************************************************************* */