testHybridGaussianFactor.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file    testHybridGaussianFactor.cpp
 * @brief   Unit tests for HybridGaussianFactor
 */

#include <gtsam/base/Testable.h>
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>

// Include for test suite
#include <CppUnitLite/TestHarness.h>

#include <memory>

using namespace std;
using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;
using symbol_shorthand::Z;

/* ************************************************************************* */
// Check iterators of empty hybrid factor.
TEST(HybridGaussianFactor, Constructor) {
  HybridGaussianFactor factor;
  HybridGaussianFactor::const_iterator const_it = factor.begin();
  CHECK(const_it == factor.end());
  HybridGaussianFactor::iterator it = factor.begin();
  CHECK(it == factor.end());
}

/* ************************************************************************* */
namespace test_constructor {
DiscreteKey m1(1, 2);

auto A1 = Matrix::Zero(2, 1);
auto A2 = Matrix::Zero(2, 2);
auto b = Matrix::Zero(2, 1);

auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
}  // namespace test_constructor

/* ************************************************************************* */
// Test simple to complex constructors...
TEST(HybridGaussianFactor, ConstructorVariants) {
  using namespace test_constructor;
  HybridGaussianFactor fromFactors({X(1), X(2)}, m1, {f10, f11});

  std::vector<GaussianFactorValuePair> pairs{{f10, 0.0}, {f11, 0.0}};
  HybridGaussianFactor fromPairs({X(1), X(2)}, m1, pairs);
  assert_equal(fromFactors, fromPairs);

  HybridGaussianFactor::FactorValuePairs decisionTree({m1}, pairs);
  HybridGaussianFactor fromDecisionTree({X(1), X(2)}, {m1}, decisionTree);
  assert_equal(fromDecisionTree, fromPairs);
}
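// Note: all three constructions above should yield equivalent factors. The
// GaussianFactorValuePair route additionally attaches a per-component scalar
// (here 0.0); the DifferentMeans/DifferentCovariances tests further below use
// that scalar to store each component's negative log normalization constant.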

/* ************************************************************************* */
// "Add" two hybrid factors together.
TEST(HybridGaussianFactor, Sum) {
  using namespace test_constructor;
  DiscreteKey m2(2, 3);

  auto A3 = Matrix::Zero(2, 3);
  auto f20 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
  auto f21 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
  auto f22 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);

  // TODO(Frank): why specify keys at all? And: keys in factor should be *all*
  // keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey?
  // Design review!
  HybridGaussianFactor hybridFactorA({X(1), X(2)}, m1, {f10, f11});
  HybridGaussianFactor hybridFactorB({X(1), X(3)}, m2, {f20, f21, f22});

  // Check that number of keys is 3
  EXPECT_LONGS_EQUAL(3, hybridFactorA.keys().size());

  // Check that number of discrete keys is 1
  EXPECT_LONGS_EQUAL(1, hybridFactorA.discreteKeys().size());

  // Create sum of two hybrid factors: it will be a decision tree now on both
  // discrete variables m1 and m2:
  GaussianFactorGraphTree sum;
  sum += hybridFactorA;
  sum += hybridFactorB;

  // Let's check that this worked:
  Assignment<Key> mode;
  mode[m1.first] = 1;
  mode[m2.first] = 2;
  auto actual = sum(mode);
  EXPECT(actual.at(0) == f11);
  EXPECT(actual.at(1) == f22);
}
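// The "sum" above is a decision tree over the joint assignment (m1, m2):
// indexing it with a specific mode returns the GaussianFactorGraph holding the
// component selected by m1 and the component selected by m2, which is what the
// two EXPECTs check (f11 for m1==1, f22 for m2==2).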

/* ************************************************************************* */
TEST(HybridGaussianFactor, Printing) {
  using namespace test_constructor;
  HybridGaussianFactor hybridFactor({X(1), X(2)}, m1, {f10, f11});

  std::string expected =
      R"(HybridGaussianFactor
Hybrid [x1 x2; 1]{
 Choice(1)
 0 Leaf :
  A[x1] = [
 0;
 0
]
  A[x2] = [
 0, 0;
 0, 0
]
  b = [ 0 0 ]
  No noise model

 1 Leaf :
  A[x1] = [
 0;
 0
]
  A[x2] = [
 0, 0;
 0, 0
]
  b = [ 0 0 ]
  No noise model

}
)";
  EXPECT(assert_print_equal(expected, hybridFactor));
}

/* ************************************************************************* */
TEST(HybridGaussianFactor, HybridGaussianConditional) {
  KeyVector keys;
  keys.push_back(X(0));
  keys.push_back(X(1));

  DiscreteKeys dKeys;
  dKeys.emplace_back(M(0), 2);
  dKeys.emplace_back(M(1), 2);

  auto gaussians = std::make_shared<GaussianConditional>();
  HybridGaussianConditional::Conditionals conditionals(gaussians);
  HybridGaussianConditional gm({}, keys, dKeys, conditionals);

  EXPECT_LONGS_EQUAL(2, gm.discreteKeys().size());
}

/* ************************************************************************* */
// Test the error of the HybridGaussianFactor
TEST(HybridGaussianFactor, Error) {
  DiscreteKey m1(1, 2);

  auto A01 = Matrix2::Identity();
  auto A02 = Matrix2::Identity();

  auto A11 = Matrix2::Identity();
  auto A12 = Matrix2::Identity() * 2;

  auto b = Vector2::Zero();

  auto f0 = std::make_shared<JacobianFactor>(X(1), A01, X(2), A02, b);
  auto f1 = std::make_shared<JacobianFactor>(X(1), A11, X(2), A12, b);
  HybridGaussianFactor hybridFactor({X(1), X(2)}, m1, {f0, f1});

  VectorValues continuousValues;
  continuousValues.insert(X(1), Vector2(0, 0));
  continuousValues.insert(X(2), Vector2(1, 1));

  // error should return a tree of errors, with nodes for each discrete value.
  AlgebraicDecisionTree<Key> error_tree =
      hybridFactor.errorTree(continuousValues);

  std::vector<DiscreteKey> discrete_keys = {m1};
  // Error values for regression test
  std::vector<double> errors = {1, 4};
  AlgebraicDecisionTree<Key> expected_error(discrete_keys, errors);

  EXPECT(assert_equal(expected_error, error_tree));

  // Test for single leaf given discrete assignment P(X|M,Z).
  DiscreteValues discreteValues;
  discreteValues[m1.first] = 1;
  EXPECT_DOUBLES_EQUAL(
      4.0, hybridFactor.error({continuousValues, discreteValues}), 1e-9);
}
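// Worked check of the regression values above: the error of a JacobianFactor
// is 0.5 * ||A1*x1 + A2*x2 - b||^2. With x1 = (0,0) and x2 = (1,1):
//   m1 = 0: residual = I*(0,0) + I*(1,1)  = (1,1), error = 0.5 * 2 = 1
//   m1 = 1: residual = I*(0,0) + 2I*(1,1) = (2,2), error = 0.5 * 8 = 4
// which matches the {1, 4} leaves of the expected error tree.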

namespace test_gmm {

/**
 * Function to compute P(m=1|z). For P(m=0|z), swap mus and sigmas.
 * If sigma0 == sigma1, it is a sigmoid function in z.
 */
double prob_m_z(double mu0, double mu1, double sigma0, double sigma1,
                double z) {
  double x1 = ((z - mu0) / sigma0), x2 = ((z - mu1) / sigma1);
  double d = sigma1 / sigma0;
  double e = d * std::exp(-0.5 * (x1 * x1 - x2 * x2));
  return 1 / (1 + e);
}
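// Derivation of prob_m_z: with equal priors P(m=0) = P(m=1) = 0.5,
//   P(m=1|z) = N(z; mu1, sigma1) / (N(z; mu0, sigma0) + N(z; mu1, sigma1))
//            = 1 / (1 + N(z; mu0, sigma0) / N(z; mu1, sigma1)),
// and the ratio of the two Gaussian densities is
//   (sigma1/sigma0) * exp(-0.5 * ((z-mu0)^2/sigma0^2 - (z-mu1)^2/sigma1^2)),
// which is exactly the quantity `e` computed above.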

static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
                                              double sigma0, double sigma1) {
  DiscreteKey m(M(0), 2);
  Key z = Z(0);

  auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
  auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);

  auto c0 = make_shared<GaussianConditional>(z, Vector1(mu0), I_1x1, model0),
       c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model1);

  HybridBayesNet hbn;
  DiscreteKeys discreteParents{m};
  hbn.emplace_shared<HybridGaussianConditional>(
      KeyVector{z}, KeyVector{}, discreteParents,
      HybridGaussianConditional::Conditionals(discreteParents,
                                              std::vector{c0, c1}));

  auto mixing = make_shared<DiscreteConditional>(m, "50/50");
  hbn.push_back(mixing);

  return hbn;
}
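// GetGaussianMixtureModel builds the joint P(z, m) = p(z | m) P(m): the
// HybridGaussianConditional holds the two Gaussian components
// N(z; mu0, sigma0) and N(z; mu1, sigma1) selected by m, and the
// DiscreteConditional "50/50" is the uniform prior on m.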

}  // namespace test_gmm

/* ************************************************************************* */
/*
 * Test a Gaussian Mixture Model P(m) p(z|m) with the same sigma for both
 * components. Conditioning on a measurement z and eliminating should give a
 * posterior P(m=1|z) that is a sigmoid in z, equal to 50/50 exactly halfway
 * between the two means and matching prob_m_z away from that point.
 */
TEST(HybridGaussianFactor, GaussianMixtureModel) {
  using namespace test_gmm;

  double mu0 = 1.0, mu1 = 3.0;
  double sigma = 2.0;

  DiscreteKey m(M(0), 2);
  Key z = Z(0);

  auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma, sigma);

  // The result should be a sigmoid.
  // So should be P(m=1|z) = 0.5 at z=3.0 - 1.0=2.0
  double midway = mu1 - mu0, lambda = 4;
  {
    VectorValues given;
    given.insert(z, Vector1(midway));

    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

    EXPECT_DOUBLES_EQUAL(
        prob_m_z(mu0, mu1, sigma, sigma, midway),
        bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}),
        1e-8);

    // At the halfway point between the means, we should get P(m|z)=0.5
    HybridBayesNet expected;
    expected.emplace_shared<DiscreteConditional>(m, "50/50");

    EXPECT(assert_equal(expected, *bn));
  }
  {
    // Shift by -lambda
    VectorValues given;
    given.insert(z, Vector1(midway - lambda));

    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

    EXPECT_DOUBLES_EQUAL(
        prob_m_z(mu0, mu1, sigma, sigma, midway - lambda),
        bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}),
        1e-8);
  }
  {
    // Shift by lambda
    VectorValues given;
    given.insert(z, Vector1(midway + lambda));

    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

    EXPECT_DOUBLES_EQUAL(
        prob_m_z(mu0, mu1, sigma, sigma, midway + lambda),
        bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}),
        1e-8);
  }
}

/* ************************************************************************* */
/*
 * Same Gaussian Mixture Model as above, but with different sigmas for the two
 * components. The posterior P(m=1|z) is now a bell-shaped curve in z rather
 * than a sigmoid, peaking near z = 3.133, and should match prob_m_z at the
 * peak as well as at points lambda away on either side.
 */
TEST(HybridGaussianFactor, GaussianMixtureModel2) {
  using namespace test_gmm;

  double mu0 = 1.0, mu1 = 3.0;
  double sigma0 = 8.0, sigma1 = 4.0;

  DiscreteKey m(M(0), 2);
  Key z = Z(0);

  auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma0, sigma1);

  double m1_high = 3.133, lambda = 4;
  {
    // The result should be a bell curve like function
    // with m1 > m0 close to 3.1333.
    // We get 3.1333 by finding the maximum value of the function.
    VectorValues given;
    given.insert(z, Vector1(3.133));

    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

    EXPECT_DOUBLES_EQUAL(
        prob_m_z(mu0, mu1, sigma0, sigma1, m1_high),
        bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8);

    // Check the full discrete posterior at the peak
    HybridBayesNet expected;
    expected.emplace_shared<DiscreteConditional>(
        m, DiscreteKeys{},
        vector<double>{prob_m_z(mu1, mu0, sigma1, sigma0, m1_high),
                       prob_m_z(mu0, mu1, sigma0, sigma1, m1_high)});

    EXPECT(assert_equal(expected, *bn));
  }
  {
    // Shift by -lambda
    VectorValues given;
    given.insert(z, Vector1(m1_high - lambda));

    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

    EXPECT_DOUBLES_EQUAL(
        prob_m_z(mu0, mu1, sigma0, sigma1, m1_high - lambda),
        bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}),
        1e-8);
  }
  {
    // Shift by lambda
    VectorValues given;
    given.insert(z, Vector1(m1_high + lambda));

    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

    EXPECT_DOUBLES_EQUAL(
        prob_m_z(mu0, mu1, sigma0, sigma1, m1_high + lambda),
        bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}),
        1e-8);
  }
}

namespace test_two_state_estimation {

DiscreteKey m1(M(1), 2);

void addMeasurement(HybridBayesNet &hbn, Key z_key, Key x_key, double sigma) {
  auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma);
  hbn.emplace_shared<GaussianConditional>(z_key, Vector1(0.0), I_1x1, x_key,
                                          -I_1x1, measurement_model);
}
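// The conditional above encodes z - x ~ N(0, sigma), i.e. the measurement
// model p(z | x) = N(z; x, sigma), written as a GaussianConditional on z_key
// with parent x_key (note the -I_1x1 on the parent).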

/// Create hybrid motion model p(x1 | x0, m1)
static HybridGaussianConditional::shared_ptr CreateHybridMotionModel(
    double mu0, double mu1, double sigma0, double sigma1) {
  auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
  auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
  auto c0 = make_shared<GaussianConditional>(X(1), Vector1(mu0), I_1x1, X(0),
                                             -I_1x1, model0),
       c1 = make_shared<GaussianConditional>(X(1), Vector1(mu1), I_1x1, X(0),
                                             -I_1x1, model1);
  DiscreteKeys discreteParents{m1};
  return std::make_shared<HybridGaussianConditional>(
      KeyVector{X(1)}, KeyVector{X(0)}, discreteParents,
      HybridGaussianConditional::Conditionals(discreteParents,
                                              std::vector{c0, c1}));
}
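// Each component encodes x1 - x0 ~ N(mu_m, sigma_m), so the motion model is
// p(x1 | x0, m1) = N(x1; x0 + mu_m1, sigma_m1), with the mode m1 selecting
// which mean/sigma pair applies.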

/// Create two state Bayes network with 1 or two measurement models
HybridBayesNet CreateBayesNet(
    const HybridGaussianConditional::shared_ptr &hybridMotionModel,
    bool add_second_measurement = false) {
  HybridBayesNet hbn;

  // Add measurement model p(z0 | x0)
  addMeasurement(hbn, Z(0), X(0), 3.0);

  // Optionally add second measurement model p(z1 | x1)
  if (add_second_measurement) {
    addMeasurement(hbn, Z(1), X(1), 3.0);
  }

  // Add hybrid motion model
  hbn.push_back(hybridMotionModel);

  // Discrete uniform prior.
  hbn.emplace_shared<DiscreteConditional>(m1, "50/50");

  return hbn;
}

/// Approximate the discrete marginal P(m1) using importance sampling
std::pair<double, double> approximateDiscreteMarginal(
    const HybridBayesNet &hbn,
    const HybridGaussianConditional::shared_ptr &hybridMotionModel,
    const VectorValues &given, size_t N = 100000) {
  // Build proposal q(x0, x1, m1) = p(x1 | x0, m1) q(x0) P(m1), where q(x0)
  // is a Gaussian centered on the measurement z0.
  HybridBayesNet q;
  q.push_back(hybridMotionModel);  // Add hybrid motion model
  q.emplace_shared<GaussianConditional>(GaussianConditional::FromMeanAndStddev(
      X(0), given.at(Z(0)), /* sigmaQ = */ 3.0));  // Add proposal q(x0) for x0
  q.emplace_shared<DiscreteConditional>(m1, "50/50");  // Discrete prior.

  // Do importance sampling
  double w0 = 0.0, w1 = 0.0;
  std::mt19937_64 rng(42);
  for (int i = 0; i < N; i++) {
    HybridValues sample = q.sample(&rng);
    sample.insert(given);
    double weight = hbn.evaluate(sample) / q.evaluate(sample);
    (sample.atDiscrete(M(1)) == 0) ? w0 += weight : w1 += weight;
  }
  double pm1 = w1 / (w0 + w1);
  std::cout << "p(m0) = " << 100 * (1.0 - pm1) << std::endl;
  std::cout << "p(m1) = " << 100 * pm1 << std::endl;
  return {1.0 - pm1, pm1};
}
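// Importance sampling rationale: each sample (x0, x1, m1) ~ q is weighted by
// w = p(x0, x1, m1, z) / q(x0, x1, m1), with the measurements z fixed to
// `given`. Accumulating weights per value of m1 and normalizing yields the
// estimate P(m1 | z) ~= w1 / (w0 + w1); the tests below quote such runs in
// comments to sanity-check the eliminated posteriors.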

}  // namespace test_two_state_estimation

/* ************************************************************************* */
/*
 * Test a model p(z0|x0) p(x1|x0,m1) P(m1), where the hybrid motion model has
 * mode-dependent means but equal sigmas.
 *
 * With a measurement on x0 only, the two modes explain the data equally well,
 * so the posterior on m1 should stay at 50/50. Adding a measurement z1 on x1
 * makes the posterior lean towards one of the modes.
 */
TEST(HybridGaussianFactor, TwoStateModel) {
  using namespace test_two_state_estimation;

  double mu0 = 1.0, mu1 = 3.0;
  double sigma = 0.5;
  auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma, sigma);

  // Start with no measurement on x1, only on x0
  const Vector1 z0(0.5);

  VectorValues given;
  given.insert(Z(0), z0);

  {
    HybridBayesNet hbn = CreateBayesNet(hybridMotionModel);
    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

    // Since there is no measurement on x1, we hedge our bets
    // Importance sampling run with 100k samples gives 50.051/49.949
    // approximateDiscreteMarginal(hbn, hybridMotionModel, given);
    DiscreteConditional expected(m1, "50/50");
    EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete())));
  }

  {
    // If we set z1=4.5 (>> 2.5 which is the halfway point),
    // probability of discrete mode should be leaning to m1==1.
    const Vector1 z1(4.5);
    given.insert(Z(1), z1);

    HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

    // Since we have a measurement on x1, we get a definite result
    // Values taken from an importance sampling run with 100k samples:
    // approximateDiscreteMarginal(hbn, hybridMotionModel, given);
    DiscreteConditional expected(m1, "44.3854/55.6146");
    EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
  }
}

/* ************************************************************************* */
/*
 * Same two-state model, but with different sigmas for the two modes of the
 * hybrid motion model. Because the components now have different
 * normalization constants, we also verify that the ratio between factor graph
 * and Bayes net error is the same for both modes, for several continuous
 * values.
 *
 * With a measurement on x0 only, the posterior on m1 should still be 50/50;
 * a measurement z1 on x1 then tilts it towards one of the modes.
 */
TEST(HybridGaussianFactor, TwoStateModel2) {
  using namespace test_two_state_estimation;

  double mu0 = 1.0, mu1 = 3.0;
  double sigma0 = 0.5, sigma1 = 2.0;
  auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1);

  // Start with no measurement on x1, only on x0
  const Vector1 z0(0.5);
  VectorValues given;
  given.insert(Z(0), z0);

  {
    HybridBayesNet hbn = CreateBayesNet(hybridMotionModel);
    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);

    // Check that ratio of Bayes net and factor graph for different modes is
    // equal for several values of {x0,x1}.
    for (VectorValues vv :
         {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
          VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
      vv.insert(given);  // add measurements for HBN
      HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
      EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
                           gfg.error(hv1) / hbn.error(hv1), 1e-9);
    }

    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

    // Importance sampling run with 100k samples gives 50.095/49.905
    // approximateDiscreteMarginal(hbn, hybridMotionModel, given);

    // Since there is no measurement on x1, we get a 50/50 probability
    auto p_m = bn->at(2)->asDiscrete();
    EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9);
    EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9);
  }

  {
    // Now we add a measurement z1 on x1
    const Vector1 z1(4.0);  // favors m==1
    given.insert(Z(1), z1);

    HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);

    // Check that ratio of Bayes net and factor graph for different modes is
    // equal for several values of {x0,x1}.
    for (VectorValues vv :
         {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
          VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
      vv.insert(given);  // add measurements for HBN
      HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
      EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
                           gfg.error(hv1) / hbn.error(hv1), 1e-9);
    }

    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

    // Values taken from an importance sampling run with 100k samples:
    // approximateDiscreteMarginal(hbn, hybridMotionModel, given);
    DiscreteConditional expected(m1, "48.3158/51.6842");
    EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
  }

  {
    // Add a different measurement z1 on x1 that favors m==0
    const Vector1 z1(1.1);
    given.insert_or_assign(Z(1), z1);

    HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

    // Values taken from an importance sampling run with 100k samples:
    // approximateDiscreteMarginal(hbn, hybridMotionModel, given);
    DiscreteConditional expected(m1, "55.396/44.604");
    EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
  }
}

/* ************************************************************************* */
/*
 * Same two-state model, but now both modes of the hybrid motion model share
 * the same mean and only differ in sigma. Any preference for one mode thus
 * comes purely from how well each sigma explains the measurements.
 *
 * With a measurement on x0 only, the posterior on m1 should be 50/50; adding
 * a measurement z1 on x1 then shifts it depending on how far z1 is from the
 * predicted mean.
 */
TEST(HybridGaussianFactor, TwoStateModel3) {
  using namespace test_two_state_estimation;

  double mu = 1.0;
  double sigma0 = 0.5, sigma1 = 2.0;
  auto hybridMotionModel = CreateHybridMotionModel(mu, mu, sigma0, sigma1);

  // Start with no measurement on x1, only on x0
  const Vector1 z0(0.5);
  VectorValues given;
  given.insert(Z(0), z0);

  {
    HybridBayesNet hbn = CreateBayesNet(hybridMotionModel);
    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);

    // Check that ratio of Bayes net and factor graph for different modes is
    // equal for several values of {x0,x1}.
    for (VectorValues vv :
         {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
          VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
      vv.insert(given);  // add measurements for HBN
      HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
      EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
                           gfg.error(hv1) / hbn.error(hv1), 1e-9);
    }

    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

    // Importance sampling run with 100k samples gives 50.095/49.905
    // approximateDiscreteMarginal(hbn, hybridMotionModel, given);

    // Since there is no measurement on x1, we get a 50/50 probability
    auto p_m = bn->at(2)->asDiscrete();
    EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9);
    EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9);
  }

  {
    // Now we add a measurement z1 on x1
    const Vector1 z1(4.0);  // favors m==1
    given.insert(Z(1), z1);

    HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);

    // Check that ratio of Bayes net and factor graph for different modes is
    // equal for several values of {x0,x1}.
    for (VectorValues vv :
         {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
          VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
      vv.insert(given);  // add measurements for HBN
      HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
      EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
                           gfg.error(hv1) / hbn.error(hv1), 1e-9);
    }

    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

    // Values taken from an importance sampling run with 100k samples:
    // approximateDiscreteMarginal(hbn, hybridMotionModel, given);
    DiscreteConditional expected(m1, "51.7762/48.2238");
    EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
  }

  {
    // Add a different measurement z1 on x1 that favors m==1
    const Vector1 z1(7.0);
    given.insert_or_assign(Z(1), z1);

    HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

    // Values taken from an importance sampling run with 100k samples:
    // approximateDiscreteMarginal(hbn, hybridMotionModel, given);
    DiscreteConditional expected(m1, "49.0762/50.9238");
    EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.005));
  }
}

/* ************************************************************************* */
/*
 * Same two-state model, but with widely separated means (0 and 10) and very
 * different sigmas (0.2 and 5.0). With measurements z0=0 and z1=10 we only
 * check the two-measurement case, which should strongly favor m1==1.
 */
TEST(HybridGaussianFactor, TwoStateModel4) {
  using namespace test_two_state_estimation;

  double mu0 = 0.0, mu1 = 10.0;
  double sigma0 = 0.2, sigma1 = 5.0;
  auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1);

  // We only check the 2-measurement case
  const Vector1 z0(0.0), z1(10.0);
  VectorValues given{{Z(0), z0}, {Z(1), z1}};

  HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
  HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
  HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();

  // Values taken from an importance sampling run with 100k samples:
  // approximateDiscreteMarginal(hbn, hybridMotionModel, given);
  DiscreteConditional expected(m1, "8.91527/91.0847");
  EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
}

namespace test_direct_factor_graph {

/**
 * @brief Create a Factor Graph by directly specifying all the factors instead
 * of creating conditionals first. This way we can directly provide the
 * likelihoods and then perform linearization.
 *
 * @param values Initial values to linearize around.
 * @param means The means of the HybridGaussianFactor components.
 * @param sigmas The sigmas of the HybridGaussianFactor components.
 * @param m1 The discrete key.
 * @param measurement_noise The sigma of the prior on X(0).
 * @return HybridGaussianFactorGraph
 */
static HybridGaussianFactorGraph CreateFactorGraph(
    const gtsam::Values &values, const std::vector<double> &means,
    const std::vector<double> &sigmas, DiscreteKey &m1,
    double measurement_noise = 1e-3) {
  auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]);
  auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]);
  auto prior_noise = noiseModel::Isotropic::Sigma(1, measurement_noise);

  auto f0 =
      std::make_shared<BetweenFactor<double>>(X(0), X(1), means[0], model0)
          ->linearize(values);
  auto f1 =
      std::make_shared<BetweenFactor<double>>(X(0), X(1), means[1], model1)
          ->linearize(values);

  // Create HybridGaussianFactor
  // We take negative since we want
  // the underlying scalar to be log(\sqrt(|2πΣ|))
  std::vector<GaussianFactorValuePair> factors{{f0, model0->negLogConstant()},
                                               {f1, model1->negLogConstant()}};
  HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors);

  HybridGaussianFactorGraph hfg;
  hfg.push_back(motionFactor);

  hfg.push_back(PriorFactor<double>(X(0), values.at<double>(X(0)), prior_noise)
                    .linearize(values));

  return hfg;
}
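// About the attached scalars: negLogConstant() is the negative log of the
// Gaussian normalization constant, i.e. log(sqrt(|2πΣ|)) as noted above.
// Carrying it per component keeps components with different sigmas comparable
// when the hybrid factor is eliminated, which is what the
// DifferentCovariancesFG test below relies on.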
}  // namespace test_direct_factor_graph

/* ************************************************************************* */
/*
 * Test a hybrid factor graph whose two components have different means but
 * equal sigmas. Without a prior on X(1) both modes attain the same minimal
 * error; adding a tight prior on X(1) at means[1] makes mode m1==1 the clear
 * winner.
 */
TEST(HybridGaussianFactor, DifferentMeansFG) {
  using namespace test_direct_factor_graph;

  DiscreteKey m1(M(1), 2);

  Values values;
  double x1 = 0.0, x2 = 1.75;
  values.insert(X(0), x1);
  values.insert(X(1), x2);

  std::vector<double> means = {0.0, 2.0}, sigmas = {1e-0, 1e-0};

  HybridGaussianFactorGraph hfg = CreateFactorGraph(values, means, sigmas, m1);

  {
    auto bn = hfg.eliminateSequential();
    HybridValues actual = bn->optimize();

    HybridValues expected(
        VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(-1.75)}},
        DiscreteValues{{M(1), 0}});

    EXPECT(assert_equal(expected, actual));

    DiscreteValues dv0{{M(1), 0}};
    VectorValues cont0 = bn->optimize(dv0);
    double error0 = bn->error(HybridValues(cont0, dv0));
    // regression
    EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9);

    DiscreteValues dv1{{M(1), 1}};
    VectorValues cont1 = bn->optimize(dv1);
    double error1 = bn->error(HybridValues(cont1, dv1));
    EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9);
  }

  {
    auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
    hfg.push_back(
        PriorFactor<double>(X(1), means[1], prior_noise).linearize(values));

    auto bn = hfg.eliminateSequential();
    HybridValues actual = bn->optimize();

    HybridValues expected(
        VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}},
        DiscreteValues{{M(1), 1}});

    EXPECT(assert_equal(expected, actual));

    {
      DiscreteValues dv{{M(1), 0}};
      VectorValues cont = bn->optimize(dv);
      double error = bn->error(HybridValues(cont, dv));
      // regression
      EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9);
    }
    {
      DiscreteValues dv{{M(1), 1}};
      VectorValues cont = bn->optimize(dv);
      double error = bn->error(HybridValues(cont, dv));
      // regression
      EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9);
    }
  }
}

/* ************************************************************************* */
/*
 * Test a hybrid factor graph whose two components have the same mean but very
 * different covariances. At the MLE point the two leaves of the error tree
 * differ essentially by the difference in log-normalization constants carried
 * in the hybrid factor, and the discrete posterior on m1 should be 0.5/0.5.
 */
TEST(HybridGaussianFactor, DifferentCovariancesFG) {
  using namespace test_direct_factor_graph;

  DiscreteKey m1(M(1), 2);

  Values values;
  double x1 = 1.0, x2 = 1.0;
  values.insert(X(0), x1);
  values.insert(X(1), x2);

  std::vector<double> means = {0.0, 0.0}, sigmas = {1e2, 1e-2};

  // Create FG with HybridGaussianFactor and prior on X(0)
  HybridGaussianFactorGraph fg = CreateFactorGraph(values, means, sigmas, m1);
  auto hbn = fg.eliminateSequential();

  VectorValues cv;
  cv.insert(X(0), Vector1(0.0));
  cv.insert(X(1), Vector1(0.0));

  // Check the error values at the MLE point μ.
  AlgebraicDecisionTree<Key> errorTree = hbn->errorTree(cv);

  DiscreteValues dv0{{M(1), 0}};
  DiscreteValues dv1{{M(1), 1}};

  // regression
  EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9);
  EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9);

  DiscreteConditional expected_m1(m1, "0.5/0.5");
  DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete());

  EXPECT(assert_equal(expected_m1, actual_m1));
}

/* ************************************************************************* */
int main() {
  TestResult tr;
  return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */