testHybridBayesNet.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>

#include "Switching.h"
#include "TinyHybridExample.h"

// Include for test suite
#include <CppUnitLite/TestHarness.h>

using namespace std;
using namespace gtsam;

using noiseModel::Isotropic;
using symbol_shorthand::M;
using symbol_shorthand::X;
using symbol_shorthand::Z;

static const Key asiaKey = 0;
static const DiscreteKey Asia(asiaKey, 2);

/* ****************************************************************************/
// Test creation of a pure discrete Bayes net.
TEST(HybridBayesNet, Creation) {
  HybridBayesNet bayesNet;
  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));

  DiscreteConditional expected(Asia, "99/1");
  CHECK(bayesNet.at(0)->asDiscrete());
  EXPECT(assert_equal(expected, *bayesNet.at(0)->asDiscrete()));
}

/* ****************************************************************************/
// Test adding a Bayes net to another one.
TEST(HybridBayesNet, Add) {
  HybridBayesNet bayesNet;
  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));

  HybridBayesNet other;
  other.add(bayesNet);
  EXPECT(bayesNet.equals(other));
}

/* ****************************************************************************/
// Test evaluate for a pure discrete Bayes net P(Asia).
TEST(HybridBayesNet, EvaluatePureDiscrete) {
  HybridBayesNet bayesNet;
  bayesNet.emplace_back(new DiscreteConditional(Asia, "4/6"));
  HybridValues values;
  values.insert(asiaKey, 0);
  EXPECT_DOUBLES_EQUAL(0.4, bayesNet.evaluate(values), 1e-9);
}

/* ****************************************************************************/
// Test creation of a tiny hybrid Bayes net.
TEST(HybridBayesNet, Tiny) {
  auto bn = tiny::createHybridBayesNet();
  EXPECT_LONGS_EQUAL(3, bn.size());

  const VectorValues vv{{Z(0), Vector1(5.0)}, {X(0), Vector1(5.0)}};
  auto fg = bn.toFactorGraph(vv);
  EXPECT_LONGS_EQUAL(3, fg.size());

  // Check that the ratio of probPrime to evaluate is the same for all modes.
  std::vector<double> ratio(2);
  for (size_t mode : {0, 1}) {
    const HybridValues hv{vv, {{M(0), mode}}};
    ratio[mode] = std::exp(-fg.error(hv)) / bn.evaluate(hv);
  }
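  // The factor graph represents the same density up to one overall
  // normalization constant, so this ratio should not depend on the mode.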
  EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8);
}

/* ****************************************************************************/
// Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
TEST(HybridBayesNet, evaluateHybrid) {
  const auto continuousConditional = GaussianConditional::sharedMeanAndStddev(
      X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0);

  const SharedDiagonal model0 = noiseModel::Diagonal::Sigmas(Vector1(2.0)),
                       model1 = noiseModel::Diagonal::Sigmas(Vector1(3.0));

  const auto conditional0 = std::make_shared<GaussianConditional>(
                 X(1), Vector1::Constant(5), I_1x1, model0),
             conditional1 = std::make_shared<GaussianConditional>(
                 X(1), Vector1::Constant(2), I_1x1, model1);

  // Create hybrid Bayes net.
  HybridBayesNet bayesNet;
  bayesNet.push_back(continuousConditional);
  bayesNet.emplace_back(
      new GaussianMixture({X(1)}, {}, {Asia}, {conditional0, conditional1}));
  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));

  // Create values at which to evaluate.
  HybridValues values;
  values.insert(asiaKey, 0);
  values.insert(X(0), Vector1(-6));
  values.insert(X(1), Vector1(1));

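  // By the chain rule, the density is P(x0|x1) * P(x1|Asia=0) * P(Asia=0),
  // where P(Asia=0) = 0.99 and the Asia=0 branch of the mixture is conditional0.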
  const double conditionalProbability =
      continuousConditional->evaluate(values.continuous());
  const double mixtureProbability = conditional0->evaluate(values.continuous());
  EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99,
                       bayesNet.evaluate(values), 1e-9);
}

/* ****************************************************************************/
// Test choosing an assignment of conditionals
TEST(HybridBayesNet, Choose) {
  Switching s(4);

  const Ordering ordering(s.linearizationPoint.keys());

  const auto [hybridBayesNet, remainingFactorGraph] =
      s.linearizedFactorGraph.eliminatePartialSequential(ordering);

  DiscreteValues assignment;
  assignment[M(0)] = 1;
  assignment[M(1)] = 1;
  assignment[M(2)] = 0;

  GaussianBayesNet gbn = hybridBayesNet->choose(assignment);

  EXPECT_LONGS_EQUAL(4, gbn.size());

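  // Each conditional in the chosen GaussianBayesNet should be the Gaussian
  // component that the corresponding mixture selects for this assignment.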
  EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asMixture())(assignment),
                      *gbn.at(0)));
  EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asMixture())(assignment),
                      *gbn.at(1)));
  EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asMixture())(assignment),
                      *gbn.at(2)));
  EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asMixture())(assignment),
                      *gbn.at(3)));
}

/* ****************************************************************************/
// Test error for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
TEST(HybridBayesNet, Error) {
  const auto continuousConditional = GaussianConditional::sharedMeanAndStddev(
      X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0);

  const SharedDiagonal model0 = noiseModel::Diagonal::Sigmas(Vector1(2.0)),
                       model1 = noiseModel::Diagonal::Sigmas(Vector1(3.0));

  const auto conditional0 = std::make_shared<GaussianConditional>(
                 X(1), Vector1::Constant(5), I_1x1, model0),
             conditional1 = std::make_shared<GaussianConditional>(
                 X(1), Vector1::Constant(2), I_1x1, model1);

  auto gm =
      new GaussianMixture({X(1)}, {}, {Asia}, {conditional0, conditional1});
  // Create hybrid Bayes net.
  HybridBayesNet bayesNet;
  bayesNet.push_back(continuousConditional);
  bayesNet.emplace_back(gm);
  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));

  // Create values at which to evaluate.
  HybridValues values;
  values.insert(asiaKey, 0);
  values.insert(X(0), Vector1(-6));
  values.insert(X(1), Vector1(1));

  AlgebraicDecisionTree<Key> actual_errors =
      bayesNet.errorTree(values.continuous());

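  // errorTree returns, for each assignment of Asia, the total error of the
  // Bayes net conditionals evaluated at the given continuous values.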
  // Regression.
  // Manually added all the error values from the 3 conditional types.
  AlgebraicDecisionTree<Key> expected_errors(
      {Asia}, std::vector<double>{2.33005033585, 5.38619084965});

  EXPECT(assert_equal(expected_errors, actual_errors));
}

/* ****************************************************************************/
// Test Bayes net optimize
TEST(HybridBayesNet, OptimizeAssignment) {
  Switching s(4);

  const Ordering ordering(s.linearizationPoint.keys());

  const auto [hybridBayesNet, remainingFactorGraph] =
      s.linearizedFactorGraph.eliminatePartialSequential(ordering);

  DiscreteValues assignment;
  assignment[M(0)] = 1;
  assignment[M(1)] = 1;
  assignment[M(2)] = 1;

  VectorValues delta = hybridBayesNet->optimize(assignment);

  // The linearization point has the same value as the key index,
  // e.g. X(0) = 1, X(1) = 2,
  // but the factors specify X(k) = k-1, so delta should be -1.
  VectorValues expected_delta;
  expected_delta.insert(make_pair(X(0), -Vector1::Ones()));
  expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
  expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
  expected_delta.insert(make_pair(X(3), -Vector1::Ones()));

  EXPECT(assert_equal(expected_delta, delta));
}

/* ****************************************************************************/
// Test Bayes net optimize
TEST(HybridBayesNet, Optimize) {
  Switching s(4, 1.0, 0.1, {0, 1, 2, 3}, "1/1 1/1");

  HybridBayesNet::shared_ptr hybridBayesNet =
      s.linearizedFactorGraph.eliminateSequential();

  HybridValues delta = hybridBayesNet->optimize();
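  // optimize() first finds the most probable discrete assignment, then
  // back-substitutes for the continuous variables given that assignment.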

  // NOTE: The true assignment is 111, but the discrete priors cause 101
  DiscreteValues expectedAssignment;
  expectedAssignment[M(0)] = 1;
  expectedAssignment[M(1)] = 1;
  expectedAssignment[M(2)] = 1;
  EXPECT(assert_equal(expectedAssignment, delta.discrete()));

  VectorValues expectedValues;
  expectedValues.insert(X(0), -Vector1::Ones());
  expectedValues.insert(X(1), -Vector1::Ones());
  expectedValues.insert(X(2), -Vector1::Ones());
  expectedValues.insert(X(3), -Vector1::Ones());

  EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
}

/* ****************************************************************************/
// Test Bayes net pruning and evaluation of the resulting density.
TEST(HybridBayesNet, Pruning) {
  Switching s(3);

  HybridBayesNet::shared_ptr posterior =
      s.linearizedFactorGraph.eliminateSequential();
  EXPECT_LONGS_EQUAL(5, posterior->size());

  HybridValues delta = posterior->optimize();
  auto actualTree = posterior->evaluate(delta.continuous());

  // Regression test on density tree.
  std::vector<DiscreteKey> discrete_keys = {{M(0), 2}, {M(1), 2}};
  std::vector<double> leaves = {6.1112424, 20.346113, 17.785849, 19.738098};
  AlgebraicDecisionTree<Key> expected(discrete_keys, leaves);
  EXPECT(assert_equal(expected, actualTree, 1e-6));

  // Prune and get probabilities
  auto prunedBayesNet = posterior->prune(2);
  auto prunedTree = prunedBayesNet.evaluate(delta.continuous());

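  // Only the two most likely discrete assignments survive pruning; their
  // probabilities are renormalized, so the corresponding densities increase.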
  // Regression test on pruned logProbability tree
  std::vector<double> pruned_leaves = {0.0, 32.713418, 0.0, 31.735823};
  AlgebraicDecisionTree<Key> expected_pruned(discrete_keys, pruned_leaves);
  EXPECT(assert_equal(expected_pruned, prunedTree, 1e-6));

  // Verify logProbability computation and check specific logProbability value
  const DiscreteValues discrete_values{{M(0), 1}, {M(1), 1}};
  const HybridValues hybridValues{delta.continuous(), discrete_values};
  double logProbability = 0;
  logProbability += posterior->at(0)->asMixture()->logProbability(hybridValues);
  logProbability += posterior->at(1)->asMixture()->logProbability(hybridValues);
  logProbability += posterior->at(2)->asMixture()->logProbability(hybridValues);
  // NOTE(dellaert): the discrete errors were not added in logProbability tree!
  logProbability +=
      posterior->at(3)->asDiscrete()->logProbability(hybridValues);
  logProbability +=
      posterior->at(4)->asDiscrete()->logProbability(hybridValues);

  // Regression
  double density = exp(logProbability);
  EXPECT_DOUBLES_EQUAL(density,
                       1.6078460548731697 * actualTree(discrete_values), 1e-6);
  EXPECT_DOUBLES_EQUAL(density, prunedTree(discrete_values), 1e-9);
  EXPECT_DOUBLES_EQUAL(logProbability, posterior->logProbability(hybridValues),
                       1e-9);
}

/* ****************************************************************************/
// Test Bayes net pruning
TEST(HybridBayesNet, Prune) {
  Switching s(4);

  HybridBayesNet::shared_ptr posterior =
      s.linearizedFactorGraph.eliminateSequential();
  EXPECT_LONGS_EQUAL(7, posterior->size());

  HybridValues delta = posterior->optimize();

  auto prunedBayesNet = posterior->prune(2);
  HybridValues pruned_delta = prunedBayesNet.optimize();

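  // Pruning keeps the most probable assignments, so the MAP solution of the
  // pruned Bayes net should match that of the full posterior.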
  EXPECT(assert_equal(delta.discrete(), pruned_delta.discrete()));
  EXPECT(assert_equal(delta.continuous(), pruned_delta.continuous()));
}

/* ****************************************************************************/
// Test Bayes net updateDiscreteConditionals
TEST(HybridBayesNet, UpdateDiscreteConditionals) {
  Switching s(4);

  HybridBayesNet::shared_ptr posterior =
      s.linearizedFactorGraph.eliminateSequential();
  EXPECT_LONGS_EQUAL(7, posterior->size());

  size_t maxNrLeaves = 3;
  DiscreteConditional discreteConditionals;
  for (auto&& conditional : *posterior) {
    if (conditional->isDiscrete()) {
      discreteConditionals =
          discreteConditionals * (*conditional->asDiscrete());
    }
  }
  const DecisionTreeFactor::shared_ptr prunedDecisionTree =
      std::make_shared<DecisionTreeFactor>(
          discreteConditionals.prune(maxNrLeaves));
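  // prune(maxNrLeaves) keeps the maxNrLeaves assignments with the highest
  // probability and sets all remaining leaves to zero.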

#ifdef GTSAM_DT_MERGING
  EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/,
                     prunedDecisionTree->nrLeaves());
#else
  EXPECT_LONGS_EQUAL(8 /*full tree*/, prunedDecisionTree->nrLeaves());
#endif

  // regression
  DiscreteKeys dkeys{{M(0), 2}, {M(1), 2}, {M(2), 2}};
  DecisionTreeFactor::ADT potentials(
      dkeys, std::vector<double>{0, 0, 0, 0.505145423, 0, 1, 0, 0.494854577});
  DiscreteConditional expected_discrete_conditionals(1, dkeys, potentials);

  // Prune!
  posterior->prune(maxNrLeaves);

  // Functor to verify values against the expected_discrete_conditionals
  auto checker = [&](const Assignment<Key>& assignment,
                     double probability) -> double {
    // typecast so we can use this to get probability value
    DiscreteValues choices(assignment);
    if (prunedDecisionTree->operator()(choices) == 0) {
      EXPECT_DOUBLES_EQUAL(0.0, probability, 1e-9);
    } else {
      EXPECT_DOUBLES_EQUAL(expected_discrete_conditionals(choices), probability,
                           1e-9);
    }
    return 0.0;
  };

  // Get the pruned discrete conditionals as an AlgebraicDecisionTree
  auto pruned_discrete_conditionals = posterior->at(4)->asDiscrete();
  auto discrete_conditional_tree =
      std::dynamic_pointer_cast<DecisionTreeFactor::ADT>(
          pruned_discrete_conditionals);

  // The checker functor verifies the values for us.
  discrete_conditional_tree->apply(checker);
}


/* ****************************************************************************/
// Test HybridBayesNet sampling.
TEST(HybridBayesNet, Sampling) {
  HybridNonlinearFactorGraph nfg;

  auto noise_model = noiseModel::Diagonal::Sigmas(Vector1(1.0));
  auto zero_motion =
      std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
  auto one_motion =
      std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
  std::vector<NonlinearFactor::shared_ptr> factors = {zero_motion, one_motion};
  nfg.emplace_shared<PriorFactor<double>>(X(0), 0.0, noise_model);
  nfg.emplace_shared<MixtureFactor>(
      KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors);

  DiscreteKey mode(M(0), 2);
  nfg.emplace_shared<DiscreteDistribution>(mode, "1/1");

  Values initial;
  double z0 = 0.0, z1 = 1.0;
  initial.insert<double>(X(0), z0);
  initial.insert<double>(X(1), z1);

  // Create the factor graph from the nonlinear factor graph.
  HybridGaussianFactorGraph::shared_ptr fg = nfg.linearize(initial);
  // Eliminate into BN
  HybridBayesNet::shared_ptr bn = fg->eliminateSequential();

  // Set up sampling
  std::mt19937_64 gen(11);

  // Initialize containers for computing the mean values.
  vector<double> discrete_samples;
  VectorValues average_continuous;

  size_t num_samples = 1000;
  for (size_t i = 0; i < num_samples; i++) {
    // Sample
    HybridValues sample = bn->sample(&gen);
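    // sample() draws the discrete mode first, then the continuous variables
    // from the Gaussian conditionals selected by that mode.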

    discrete_samples.push_back(sample.discrete().at(M(0)));

    if (i == 0) {
      average_continuous.insert(sample.continuous());
    } else {
      average_continuous += sample.continuous();
    }
  }

  EXPECT_LONGS_EQUAL(2, average_continuous.size());
  EXPECT_LONGS_EQUAL(num_samples, discrete_samples.size());

  // Regressions don't work across platforms :-(
  // // regression for specific RNG seed
  // double discrete_sum =
  //     std::accumulate(discrete_samples.begin(), discrete_samples.end(),
  //                     decltype(discrete_samples)::value_type(0));
  // EXPECT_DOUBLES_EQUAL(0.477, discrete_sum / num_samples, 1e-9);

  // VectorValues expected;
  // expected.insert({X(0), Vector1(-0.0131207162712)});
  // expected.insert({X(1), Vector1(-0.499026377568)});
  // // regression for specific RNG seed
  // EXPECT(assert_equal(expected, average_continuous.scale(1.0 /
  // num_samples)));
}


/* ************************************************************************* */
int main() {
  TestResult tr;
  return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */