testHybridNonlinearFactorGraph.cpp
1 /* ----------------------------------------------------------------------------
2  * GTSAM Copyright 2010, Georgia Tech Research Corporation,
3  * Atlanta, Georgia 30332-0415
4  * All Rights Reserved
5  * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
6  * See LICENSE for the license information
7  * -------------------------------------------------------------------------- */
8 
18 #include <gtsam/base/TestableAssertions.h>
19 #include <gtsam/base/utilities.h>
20 #include <gtsam/discrete/DiscreteBayesNet.h>
21 #include <gtsam/discrete/DiscreteDistribution.h>
22 #include <gtsam/discrete/DiscreteFactorGraph.h>
23 #include <gtsam/geometry/Pose2.h>
24 #include <gtsam/hybrid/HybridEliminationTree.h>
25 #include <gtsam/hybrid/HybridFactor.h>
26 #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
27 #include <gtsam/hybrid/MixtureFactor.h>
28 #include <gtsam/linear/GaussianBayesNet.h>
29 #include <gtsam/linear/GaussianFactorGraph.h>
30 #include <gtsam/nonlinear/NonlinearFactorGraph.h>
31 #include <gtsam/nonlinear/PriorFactor.h>
32 #include <gtsam/sam/BearingRangeFactor.h>
33 #include <gtsam/slam/BetweenFactor.h>
34 
35 #include <numeric>
36 
37 #include "Switching.h"
38 
39 // Include for test suite
40 #include <CppUnitLite/TestHarness.h>
41 
42 using namespace std;
43 using namespace gtsam;
44 using noiseModel::Isotropic;
45 using symbol_shorthand::L;
46 using symbol_shorthand::M;
47 using symbol_shorthand::X;
48 
49 /* ****************************************************************************
50  * Test that the Gaussian factors of a linearized factor graph are appended to
51  * the existing Gaussian factor graph in the hybrid factor graph.
52  */
53 TEST(HybridFactorGraph, GaussianFactorGraph) {
54  HybridNonlinearFactorGraph fg;
55 
56  // Add a simple prior factor to the nonlinear factor graph
57  fg.emplace_shared<PriorFactor<double>>(X(0), 0, Isotropic::Sigma(1, 0.1));
58 
59  // Linearization point
60  Values linearizationPoint;
61  linearizationPoint.insert<double>(X(0), 0);
62 
63  // Linearize the factor graph.
64  HybridGaussianFactorGraph ghfg = *fg.linearize(linearizationPoint);
65  EXPECT_LONGS_EQUAL(1, ghfg.size());
66 
67  // Check that the error is the same for the nonlinear values.
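 // At the linearization point the linear delta is zero, so the linearized error
 // equals the nonlinear error.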
68  const VectorValues zero{{X(0), Vector1(0)}};
69  const HybridValues hybridValues{zero, {}, linearizationPoint};
70  EXPECT_DOUBLES_EQUAL(fg.error(hybridValues), ghfg.error(hybridValues), 1e-9);
71 }
72 
73 /***************************************************************************
74  * Test equality for Hybrid Nonlinear Factor Graph.
75  */
76 TEST(HybridFactorGraph, Equals) {
77  HybridNonlinearFactorGraph graph1;
78  HybridNonlinearFactorGraph graph2;
79 
80  // Test empty factor graphs
81  EXPECT(assert_equal(graph1, graph2));
82 
83  auto f0 = std::make_shared<PriorFactor<Pose2>>(
84  1, Pose2(), noiseModel::Isotropic::Sigma(3, 0.001));
85  graph1.push_back(f0);
86  graph2.push_back(f0);
87 
88  auto f1 = std::make_shared<BetweenFactor<Pose2>>(
89  1, 2, Pose2(), noiseModel::Isotropic::Sigma(3, 0.1));
90  graph1.push_back(f1);
91  graph2.push_back(f1);
92 
93  // Test non-empty graphs
94  EXPECT(assert_equal(graph1, graph2));
95 }
96 
97 /***************************************************************************
98  * Test that the resize method works correctly for a HybridNonlinearFactorGraph.
99  */
100 TEST(HybridFactorGraph, Resize) {
101  HybridNonlinearFactorGraph fg;
102  auto nonlinearFactor = std::make_shared<BetweenFactor<double>>();
103  fg.push_back(nonlinearFactor);
104 
105  auto discreteFactor = std::make_shared<DecisionTreeFactor>();
106  fg.push_back(discreteFactor);
107 
108  auto dcFactor = std::make_shared<MixtureFactor>();
109  fg.push_back(dcFactor);
110 
111  EXPECT_LONGS_EQUAL(fg.size(), 3);
112 
113  fg.resize(0);
114  EXPECT_LONGS_EQUAL(fg.size(), 0);
115 }
116 
117 /***************************************************************************
118  * Test that the resize method works correctly for a
119  * HybridGaussianFactorGraph.
120  */
121 TEST(HybridGaussianFactorGraph, Resize) {
122  HybridNonlinearFactorGraph nhfg;
123  auto nonlinearFactor = std::make_shared<BetweenFactor<double>>(
124  X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1));
125  nhfg.push_back(nonlinearFactor);
126  auto discreteFactor = std::make_shared<DecisionTreeFactor>();
127  nhfg.push_back(discreteFactor);
128 
129  KeyVector contKeys = {X(0), X(1)};
130  auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0);
131  auto still = std::make_shared<MotionModel>(X(0), X(1), 0.0, noise_model),
132  moving = std::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model);
133 
134  std::vector<MotionModel::shared_ptr> components = {still, moving};
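 // One motion hypothesis per assignment of the binary mode: "still" (0.0) vs "moving" (1.0).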
135  auto dcFactor = std::make_shared<MixtureFactor>(
136  contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
137  nhfg.push_back(dcFactor);
138 
139  Values linearizationPoint;
140  linearizationPoint.insert<double>(X(0), 0);
141  linearizationPoint.insert<double>(X(1), 1);
142 
143  // Generate `HybridGaussianFactorGraph` by linearizing
144  HybridGaussianFactorGraph gfg = *nhfg.linearize(linearizationPoint);
145 
146  EXPECT_LONGS_EQUAL(gfg.size(), 3);
147 
148  gfg.resize(0);
149  EXPECT_LONGS_EQUAL(gfg.size(), 0);
150 }
151 
152 /***************************************************************************
153  * Test that the MixtureFactor reports correctly if the number of continuous
154  * keys provided does not match the keys in the factors.
155  */
156 TEST(HybridFactorGraph, MixtureFactor) {
157  auto nonlinearFactor = std::make_shared<BetweenFactor<double>>(
158  X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1));
159  auto discreteFactor = std::make_shared<DecisionTreeFactor>();
160 
161  auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0);
162  auto still = std::make_shared<MotionModel>(X(0), X(1), 0.0, noise_model),
163  moving = std::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model);
164 
165  std::vector<MotionModel::shared_ptr> components = {still, moving};
166 
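 // The MixtureFactor constructor checks the supplied continuous keys against the
 // keys of each component factor and throws on a mismatch.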
167  // Check for exception when number of continuous keys are under-specified.
168  KeyVector contKeys = {X(0)};
169  THROWS_EXCEPTION(std::make_shared<MixtureFactor>(
170  contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components));
171 
172  // Check for exception when number of continuous keys are too many.
173  contKeys = {X(0), X(1), X(2)};
174  THROWS_EXCEPTION(std::make_shared<MixtureFactor>(
175  contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components));
176 }
177 
178 /*****************************************************************************
179  * Test that push_back on an HFG makes the correct distinction between factor types.
180  */
181 TEST(HybridFactorGraph, PushBack) {
182  HybridNonlinearFactorGraph fg;
183 
184  auto nonlinearFactor = std::make_shared<BetweenFactor<double>>();
185  fg.push_back(nonlinearFactor);
186 
187  EXPECT_LONGS_EQUAL(fg.size(), 1);
188 
189  fg = HybridNonlinearFactorGraph();
190 
191  auto discreteFactor = std::make_shared<DecisionTreeFactor>();
192  fg.push_back(discreteFactor);
193 
194  EXPECT_LONGS_EQUAL(fg.size(), 1);
195 
196  fg = HybridNonlinearFactorGraph();
197 
198  auto dcFactor = std::make_shared<MixtureFactor>();
199  fg.push_back(dcFactor);
200 
201  EXPECT_LONGS_EQUAL(fg.size(), 1);
202 
203  // Now do the same with HybridGaussianFactorGraph
204  HybridGaussianFactorGraph ghfg;
205 
206  auto gaussianFactor = std::make_shared<JacobianFactor>();
207  ghfg.push_back(gaussianFactor);
208 
209  EXPECT_LONGS_EQUAL(ghfg.size(), 1);
210 
211  ghfg = HybridGaussianFactorGraph();
212  ghfg.push_back(discreteFactor);
213 
214  EXPECT_LONGS_EQUAL(ghfg.size(), 1);
215 
216  ghfg = HybridGaussianFactorGraph();
217  ghfg.push_back(dcFactor);
218 
219  HybridGaussianFactorGraph hgfg2;
220  hgfg2.push_back(ghfg.begin(), ghfg.end());
221 
222  EXPECT_LONGS_EQUAL(ghfg.size(), 1);
223 
224  HybridNonlinearFactorGraph hnfg;
225  NonlinearFactorGraph factors;
226  auto noise = noiseModel::Isotropic::Sigma(3, 1.0);
227  factors.emplace_shared<PriorFactor<Pose2>>(0, Pose2(0, 0, 0), noise);
228  factors.emplace_shared<PriorFactor<Pose2>>(1, Pose2(1, 0, 0), noise);
229  factors.emplace_shared<PriorFactor<Pose2>>(2, Pose2(2, 0, 0), noise);
230  // TODO(Varun) This does not currently work. It should work once HybridFactor
231  // becomes a base class of NonlinearFactor.
232  // hnfg.push_back(factors.begin(), factors.end());
233 
234  // EXPECT_LONGS_EQUAL(3, hnfg.size());
235 }
236 
237 /****************************************************************************
238  * Test construction of switching-like hybrid factor graph.
239  */
240 TEST(HybridFactorGraph, Switching) {
241  Switching self(3);
242 
243  EXPECT_LONGS_EQUAL(7, self.nonlinearFactorGraph.size());
244  EXPECT_LONGS_EQUAL(7, self.linearizedFactorGraph.size());
245 }
246 
247 /****************************************************************************
248  * Test linearization on a switching-like hybrid factor graph.
249  */
250 TEST(HybridFactorGraph, Linearization) {
251  Switching self(3);
252 
253  // Linearize here:
254  HybridGaussianFactorGraph actualLinearized =
255  *self.nonlinearFactorGraph.linearize(self.linearizationPoint);
256 
257  EXPECT_LONGS_EQUAL(7, actualLinearized.size());
258 }
259 
260 /****************************************************************************
261  * Test elimination tree construction
262  */
263 TEST(HybridFactorGraph, EliminationTree) {
264  Switching self(3);
265 
266  // Create ordering.
267  Ordering ordering;
268  for (size_t k = 0; k < self.K; k++) ordering.push_back(X(k));
269 
270  // Create elimination tree.
271  HybridEliminationTree etree(self.linearizedFactorGraph, ordering);
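 // The continuous variables form a single chain x0-x1-x2, so the tree has one root.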
272  EXPECT_LONGS_EQUAL(1, etree.roots().size());
273 }
274 
275 /****************************************************************************
276  * Test elimination function by eliminating x0 in *-x0-*-x1 graph.
277  */
278 TEST(GaussianElimination, Eliminate_x0) {
279  Switching self(3);
280 
281  // Gather factors on x0: a simple Gaussian prior and a hybrid motion factor.
282  HybridGaussianFactorGraph factors;
283  // Add gaussian prior
284  factors.push_back(self.linearizedFactorGraph[0]);
285  // Add first hybrid factor
286  factors.push_back(self.linearizedFactorGraph[1]);
287 
288  // Eliminate x0
289  const Ordering ordering{X(0)};
290 
291  const auto result = EliminateHybrid(factors, ordering);
292  CHECK(result.first);
293  EXPECT_LONGS_EQUAL(1, result.first->nrFrontals());
294  CHECK(result.second);
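 // Eliminating x0 from {prior(x0), motion(x0, x1; m0)} leaves a factor on the separator.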
295  // Has two keys: x1 and m0
296  EXPECT_LONGS_EQUAL(2, result.second->size());
297 }
298 
299 /****************************************************************************
300  * Test elimination function by eliminating x1 in x0-*-x1-*-x2 chain.
301  * m0/ \m1
302  */
303 TEST(HybridsGaussianElimination, Eliminate_x1) {
304  Switching self(3);
305 
306  // Gather factors on x1, will be two mixture factors (with x0 and x2, resp.).
307  HybridGaussianFactorGraph factors;
308  factors.push_back(self.linearizedFactorGraph[1]); // involves m0
309  factors.push_back(self.linearizedFactorGraph[2]); // involves m1
310 
311  // Eliminate x1
312  const Ordering ordering{X(1)};
313 
314  const auto result = EliminateHybrid(factors, ordering);
315  CHECK(result.first);
316  EXPECT_LONGS_EQUAL(1, result.first->nrFrontals());
317  CHECK(result.second);
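 // Eliminating x1 from the two motion factors leaves a factor on x0, x2 and both modes.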
318  // Note: the separator keys are x0, x2, m0, m1 (hence size 4).
319  EXPECT_LONGS_EQUAL(4, result.second->size());
320 }
321 
322 /****************************************************************************
323  * Helper method to generate Gaussian factor graphs with a specific mode.
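 * (It linearizes a plain NonlinearFactorGraph built with the given `between` value,
 * i.e. the Gaussian factor graph corresponding to one fixed mode.)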
324  */
325 GaussianFactorGraph::shared_ptr batchGFG(double between,
326  Values linearizationPoint) {
327  NonlinearFactorGraph graph;
328  graph.addPrior<double>(X(0), 0, Isotropic::Sigma(1, 0.1));
329 
330  auto between_x0_x1 = std::make_shared<MotionModel>(X(0), X(1), between,
331  Isotropic::Sigma(1, 1.0));
332 
333  graph.push_back(between_x0_x1);
334 
335  return graph.linearize(linearizationPoint);
336 }
337 
338 /****************************************************************************
339  * Test elimination function by eliminating x0 and x1 in graph.
340  */
341 TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
342  Switching self(2, 1.0, 0.1);
343 
344  auto factors = self.linearizedFactorGraph;
345 
346  // Eliminate x0 and x1
347  const Ordering ordering{X(0), X(1)};
348 
349  const auto [hybridConditionalMixture, factorOnModes] =
350  EliminateHybrid(factors, ordering);
351 
352  auto gaussianConditionalMixture =
353  dynamic_pointer_cast<GaussianMixture>(hybridConditionalMixture->inner());
354 
355  CHECK(gaussianConditionalMixture);
356  // Frontals = [x0, x1]
357  EXPECT_LONGS_EQUAL(2, gaussianConditionalMixture->nrFrontals());
358  // 1 parent, which is the mode
359  EXPECT_LONGS_EQUAL(1, gaussianConditionalMixture->nrParents());
360 
361  // This is now a discreteFactor
362  auto discreteFactor = dynamic_pointer_cast<DecisionTreeFactor>(factorOnModes);
363  CHECK(discreteFactor);
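 // The factor on the modes carries one value per assignment of m0, reflecting how
 // well each mode explains the measurements, so its tree is not a single leaf.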
364  EXPECT_LONGS_EQUAL(1, discreteFactor->discreteKeys().size());
365  EXPECT(discreteFactor->root_->isLeaf() == false);
366 }
367 
368 /****************************************************************************
369  * Test partial elimination
370  */
371 TEST(HybridFactorGraph, Partial_Elimination) {
372  Switching self(3);
373 
374  auto linearizedFactorGraph = self.linearizedFactorGraph;
375 
376  // Create ordering of only continuous variables.
377  Ordering ordering;
378  for (size_t k = 0; k < self.K; k++) ordering.push_back(X(k));
379 
380  // Eliminate partially i.e. only continuous part.
381  const auto [hybridBayesNet, remainingFactorGraph] =
382  linearizedFactorGraph.eliminatePartialSequential(ordering);
383 
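 // Expect a Gaussian-mixture Bayes net P(x0|x1,m0) P(x1|x2,m0,m1) P(x2|m0,m1),
 // plus remaining factors that involve only the discrete modes.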
384  CHECK(hybridBayesNet);
385  EXPECT_LONGS_EQUAL(3, hybridBayesNet->size());
386  EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(0)});
387  EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(1), M(0)}));
388  EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(1)});
389  EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(2), M(0), M(1)}));
390  EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(2)});
391  EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(0), M(1)}));
392 
393  CHECK(remainingFactorGraph);
394  EXPECT_LONGS_EQUAL(3, remainingFactorGraph->size());
395  EXPECT(remainingFactorGraph->at(0)->keys() == KeyVector({M(0)}));
396  EXPECT(remainingFactorGraph->at(1)->keys() == KeyVector({M(1), M(0)}));
397  EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(0), M(1)}));
398 }
399 
400 TEST(HybridFactorGraph, PrintErrors) {
401  Switching self(3);
402 
403  // Get nonlinear factor graph and add linear factors to be holistic
404  HybridNonlinearFactorGraph fg = self.nonlinearFactorGraph;
405  fg.add(self.linearizedFactorGraph);
406 
407  // Optimize to get HybridValues
408  HybridBayesNet::shared_ptr bn =
409  self.linearizedFactorGraph.eliminateSequential();
410  HybridValues hv = bn->optimize();
411 
412  // Print and verify
413  // fg.print();
414  // std::cout << "\n\n\n" << std::endl;
415  // fg.printErrors(
416  // HybridValues(hv.continuous(), DiscreteValues(), self.linearizationPoint));
417 }
418 
419 /****************************************************************************
420  * Test full elimination
421  */
422 TEST(HybridFactorGraph, Full_Elimination) {
423  Switching self(3);
424 
425  auto linearizedFactorGraph = self.linearizedFactorGraph;
426 
427  // We first do a partial elimination
428  DiscreteBayesNet discreteBayesNet;
429 
430  {
431  // Create ordering.
432  Ordering ordering;
433  for (size_t k = 0; k < self.K; k++) ordering.push_back(X(k));
434 
435  // Eliminate partially.
436  const auto [hybridBayesNet_partial, remainingFactorGraph_partial] =
437  linearizedFactorGraph.eliminatePartialSequential(ordering);
438 
439  DiscreteFactorGraph discrete_fg;
440  // TODO(Varun) Make this a function of HybridGaussianFactorGraph?
441  for (auto& factor : (*remainingFactorGraph_partial)) {
442  auto df = dynamic_pointer_cast<DiscreteFactor>(factor);
443  assert(df);
444  discrete_fg.push_back(df);
445  }
446 
447  ordering.clear();
448  for (size_t k = 0; k < self.K - 1; k++) ordering.push_back(M(k));
449  discreteBayesNet =
450  *discrete_fg.eliminateSequential(ordering, EliminateDiscrete);
451  }
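 // discreteBayesNet now holds the discrete-only result; the full hybrid elimination
 // below should reproduce it as its discrete tail.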
452 
453  // Create ordering.
454  Ordering ordering;
455  for (size_t k = 0; k < self.K; k++) ordering.push_back(X(k));
456  for (size_t k = 0; k < self.K - 1; k++) ordering.push_back(M(k));
457 
458  // Eliminate fully (all continuous and discrete variables).
459  HybridBayesNet::shared_ptr hybridBayesNet =
460  linearizedFactorGraph.eliminateSequential(ordering);
461 
462  CHECK(hybridBayesNet);
463  EXPECT_LONGS_EQUAL(5, hybridBayesNet->size());
464  // p(x0 | x1, m0)
465  EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(0)});
466  EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(1), M(0)}));
467  // p(x1 | x2, m0, m1)
468  EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(1)});
469  EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(2), M(0), M(1)}));
470  // p(x2 | m0, m1)
471  EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(2)});
472  EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(0), M(1)}));
473  // P(m0 | m1)
474  EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(0)});
475  EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(1)}));
476  EXPECT(
477  dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(3)->inner())
478  ->equals(*discreteBayesNet.at(0)));
479  // P(m1)
480  EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(1)});
481  EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents());
482  EXPECT(
483  dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(4)->inner())
484  ->equals(*discreteBayesNet.at(1)));
485 }
486 
487 /****************************************************************************
488  * Test printing
489  */
490 TEST(HybridFactorGraph, Printing) {
491  Switching self(3);
492 
493  auto linearizedFactorGraph = self.linearizedFactorGraph;
494 
495  // Create ordering.
496  Ordering ordering;
497  for (size_t k = 0; k < self.K; k++) ordering.push_back(X(k));
498 
499  // Eliminate partially.
500  const auto [hybridBayesNet, remainingFactorGraph] =
501  linearizedFactorGraph.eliminatePartialSequential(ordering);
502 
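 // With GTSAM_DT_MERGING, identical decision-tree leaves are merged; e.g. factor 5,
 // P(m0), prints as a single 0.5 leaf instead of a Choice over m0.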
503 #ifdef GTSAM_DT_MERGING
504  string expected_hybridFactorGraph = R"(
505 size: 7
506 factor 0:
507  A[x0] = [
508  10
509 ]
510  b = [ -10 ]
511  No noise model
512 factor 1:
513 Hybrid [x0 x1; m0]{
514  Choice(m0)
515  0 Leaf :
516  A[x0] = [
517  -1
518 ]
519  A[x1] = [
520  1
521 ]
522  b = [ -1 ]
523  No noise model
524 
525  1 Leaf :
526  A[x0] = [
527  -1
528 ]
529  A[x1] = [
530  1
531 ]
532  b = [ -0 ]
533  No noise model
534 
535 }
536 factor 2:
537 Hybrid [x1 x2; m1]{
538  Choice(m1)
539  0 Leaf :
540  A[x1] = [
541  -1
542 ]
543  A[x2] = [
544  1
545 ]
546  b = [ -1 ]
547  No noise model
548 
549  1 Leaf :
550  A[x1] = [
551  -1
552 ]
553  A[x2] = [
554  1
555 ]
556  b = [ -0 ]
557  No noise model
558 
559 }
560 factor 3:
561  A[x1] = [
562  10
563 ]
564  b = [ -10 ]
565  No noise model
566 factor 4:
567  A[x2] = [
568  10
569 ]
570  b = [ -10 ]
571  No noise model
572 factor 5: P( m0 ):
573  Leaf 0.5
574 
575 factor 6: P( m1 | m0 ):
576  Choice(m1)
577  0 Choice(m0)
578  0 0 Leaf 0.33333333
579  0 1 Leaf 0.6
580  1 Choice(m0)
581  1 0 Leaf 0.66666667
582  1 1 Leaf 0.4
583 
584 )";
585 #else
586  string expected_hybridFactorGraph = R"(
587 size: 7
588 factor 0:
589  A[x0] = [
590  10
591 ]
592  b = [ -10 ]
593  No noise model
594 factor 1:
595 Hybrid [x0 x1; m0]{
596  Choice(m0)
597  0 Leaf:
598  A[x0] = [
599  -1
600 ]
601  A[x1] = [
602  1
603 ]
604  b = [ -1 ]
605  No noise model
606 
607  1 Leaf:
608  A[x0] = [
609  -1
610 ]
611  A[x1] = [
612  1
613 ]
614  b = [ -0 ]
615  No noise model
616 
617 }
618 factor 2:
619 Hybrid [x1 x2; m1]{
620  Choice(m1)
621  0 Leaf:
622  A[x1] = [
623  -1
624 ]
625  A[x2] = [
626  1
627 ]
628  b = [ -1 ]
629  No noise model
630 
631  1 Leaf:
632  A[x1] = [
633  -1
634 ]
635  A[x2] = [
636  1
637 ]
638  b = [ -0 ]
639  No noise model
640 
641 }
642 factor 3:
643  A[x1] = [
644  10
645 ]
646  b = [ -10 ]
647  No noise model
648 factor 4:
649  A[x2] = [
650  10
651 ]
652  b = [ -10 ]
653  No noise model
654 factor 5: P( m0 ):
655  Choice(m0)
656  0 Leaf 0.5
657  1 Leaf 0.5
658 
659 factor 6: P( m1 | m0 ):
660  Choice(m1)
661  0 Choice(m0)
662  0 0 Leaf 0.33333333
663  0 1 Leaf 0.6
664  1 Choice(m0)
665  1 0 Leaf 0.66666667
666  1 1 Leaf 0.4
667 
668 )";
669 #endif
670 
671  EXPECT(assert_print_equal(expected_hybridFactorGraph, linearizedFactorGraph));
672 
673  // Expected output for hybridBayesNet.
674  string expected_hybridBayesNet = R"(
675 size: 3
676 conditional 0: Hybrid P( x0 | x1 m0)
677  Discrete Keys = (m0, 2),
678  Choice(m0)
679  0 Leaf p(x0 | x1)
680  R = [ 10.0499 ]
681  S[x1] = [ -0.0995037 ]
682  d = [ -9.85087 ]
683  No noise model
684 
685  1 Leaf p(x0 | x1)
686  R = [ 10.0499 ]
687  S[x1] = [ -0.0995037 ]
688  d = [ -9.95037 ]
689  No noise model
690 
691 conditional 1: Hybrid P( x1 | x2 m0 m1)
692  Discrete Keys = (m0, 2), (m1, 2),
693  Choice(m1)
694  0 Choice(m0)
695  0 0 Leaf p(x1 | x2)
696  R = [ 10.099 ]
697  S[x2] = [ -0.0990196 ]
698  d = [ -9.99901 ]
699  No noise model
700 
701  0 1 Leaf p(x1 | x2)
702  R = [ 10.099 ]
703  S[x2] = [ -0.0990196 ]
704  d = [ -9.90098 ]
705  No noise model
706 
707  1 Choice(m0)
708  1 0 Leaf p(x1 | x2)
709  R = [ 10.099 ]
710  S[x2] = [ -0.0990196 ]
711  d = [ -10.098 ]
712  No noise model
713 
714  1 1 Leaf p(x1 | x2)
715  R = [ 10.099 ]
716  S[x2] = [ -0.0990196 ]
717  d = [ -10 ]
718  No noise model
719 
720 conditional 2: Hybrid P( x2 | m0 m1)
721  Discrete Keys = (m0, 2), (m1, 2),
722  Choice(m1)
723  0 Choice(m0)
724  0 0 Leaf p(x2)
725  R = [ 10.0494 ]
726  d = [ -10.1489 ]
727  mean: 1 elements
728  x2: -1.0099
729  No noise model
730 
731  0 1 Leaf p(x2)
732  R = [ 10.0494 ]
733  d = [ -10.1479 ]
734  mean: 1 elements
735  x2: -1.0098
736  No noise model
737 
738  1 Choice(m0)
739  1 0 Leaf p(x2)
740  R = [ 10.0494 ]
741  d = [ -10.0504 ]
742  mean: 1 elements
743  x2: -1.0001
744  No noise model
745 
746  1 1 Leaf p(x2)
747  R = [ 10.0494 ]
748  d = [ -10.0494 ]
749  mean: 1 elements
750  x2: -1
751  No noise model
752 
753 )";
754  EXPECT(assert_print_equal(expected_hybridBayesNet, *hybridBayesNet));
755 }
756 
757 /****************************************************************************
758  * Simple PlanarSLAM example test with 2 poses and 2 landmarks (each pose
759  * connects to 1 landmark) to expose an issue with default decision tree creation
760  * in hybrid elimination. The hybrid factor is between the poses X0 and X1.
761  * The issue arises if we eliminate a landmark variable first since it is not
762  * connected to a HybridFactor.
763  */
764 TEST(HybridFactorGraph, DefaultDecisionTree) {
765  HybridNonlinearFactorGraph fg;
766 
767  // Add a prior on pose x0 at the origin.
768  // A prior factor consists of a mean and a noise model (covariance matrix)
769  Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin
770  auto priorNoise = noiseModel::Diagonal::Sigmas(
771  Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta
772  fg.emplace_shared<PriorFactor<Pose2>>(X(0), prior, priorNoise);
773 
774  using PlanarMotionModel = BetweenFactor<Pose2>;
775 
776  // Add odometry factor
777  Pose2 odometry(2.0, 0.0, 0.0);
778  KeyVector contKeys = {X(0), X(1)};
779  auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0);
780  auto still = std::make_shared<PlanarMotionModel>(X(0), X(1), Pose2(0, 0, 0),
781  noise_model),
782  moving = std::make_shared<PlanarMotionModel>(X(0), X(1), odometry,
783  noise_model);
784  std::vector<PlanarMotionModel::shared_ptr> motion_models = {still, moving};
785  fg.emplace_shared<MixtureFactor>(
786  contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models);
787 
788  // Add Range-Bearing measurements from X0 to L0 and from X1 to L1.
789  // create a noise model for the landmark measurements
790  auto measurementNoise = noiseModel::Diagonal::Sigmas(
791  Vector2(0.1, 0.2)); // 0.1 rad std on bearing, 20cm on range
792  // create the measurement values - indices are (pose id, landmark id)
793  Rot2 bearing11 = Rot2::fromDegrees(45), bearing22 = Rot2::fromDegrees(90);
794  double range11 = std::sqrt(4.0 + 4.0), range22 = 2.0;
795 
796  // Add Bearing-Range factors
797  fg.emplace_shared<BearingRangeFactor<Pose2, Point2>>(
798  X(0), L(0), bearing11, range11, measurementNoise);
799  fg.emplace_shared<BearingRangeFactor<Pose2, Point2>>(
800  X(1), L(1), bearing22, range22, measurementNoise);
801 
802  // Create (deliberately inaccurate) initial estimate
803  Values initialEstimate;
804  initialEstimate.insert(X(0), Pose2(0.5, 0.0, 0.2));
805  initialEstimate.insert(X(1), Pose2(2.3, 0.1, -0.2));
806  initialEstimate.insert(L(0), Point2(1.8, 2.1));
807  initialEstimate.insert(L(1), Point2(4.1, 1.8));
808 
809  // We want to eliminate variables not connected to DCFactors first.
810  const Ordering ordering{L(0), L(1), X(0), X(1)};
811 
812  HybridGaussianFactorGraph linearized = *fg.linearize(initialEstimate);
813 
814  // This should NOT fail
815  const auto [hybridBayesNet, remainingFactorGraph] =
816  linearized.eliminatePartialSequential(ordering);
817  EXPECT_LONGS_EQUAL(4, hybridBayesNet->size());
818  EXPECT_LONGS_EQUAL(1, remainingFactorGraph->size());
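 // Four continuous variables (l0, l1, x0, x1) yield four conditionals; the single
 // remaining factor is over the discrete mode.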
819 }
820 
821 /* ************************************************************************* */
822 int main() {
823  TestResult tr;
824  return TestRegistry::runAllTests(tr);
825 }
826 /* ************************************************************************* */