testHybridSmoother.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

#include <gtsam/discrete/DiscreteBayesNet.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearISAM.h>
#include <gtsam/hybrid/HybridSmoother.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianBayesTree.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>

// Include for test suite
#include <CppUnitLite/TestHarness.h>

#include <string>

#include "Switching.h"

using namespace std;
using namespace gtsam;

using symbol_shorthand::M;
using symbol_shorthand::X;

namespace estimation_fixture {
std::vector<double> measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6,
                                    7, 8, 9, 9, 9, 10, 11, 11, 11, 11};
// Ground truth discrete sequence
std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0,
                                    1, 1, 1, 0, 0, 1, 1, 0, 0, 0};

// Create a Switching example and seed the graph and initial values with the
// priors on M(0) and X(0).
Switching InitializeEstimationProblem(
    const size_t K, const double between_sigma, const double measurement_sigma,
    const std::vector<double>& measurements,
    const std::string& transitionProbabilityTable,
    HybridNonlinearFactorGraph* graph, Values* initial) {
  Switching switching(K, between_sigma, measurement_sigma, measurements,
                      transitionProbabilityTable);

  // Add prior on M(0)
  graph->push_back(switching.modeChain.at(0));

  // Add the X(0) prior
  graph->push_back(switching.unaryFactors.at(0));
  initial->insert(X(0), switching.linearizationPoint.at<double>(X(0)));

  return switching;
}

}  // namespace estimation_fixture

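// The tests below share the same structure: build a K-step Switching problem,
// then at each timestep k push the mode-transition factor, the motion model,
// and the measurement into the graph, call HybridSmoother::update(), and prune
// the discrete hypotheses down to maxNrLeaves before the next step.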
/****************************************************************************/
// Test approximate inference with an additional pruning step.
TEST(HybridSmoother, IncrementalSmoother) {
  using namespace estimation_fixture;

  size_t K = 5;

  // Switching example of robot moving in 1D
  // with given measurements and equal mode priors.
  HybridNonlinearFactorGraph graph;
  Values initial;
  Switching switching = InitializeEstimationProblem(
      K, 1.0, 0.1, measurements, "1/1 1/1", &graph, &initial);

  HybridSmoother smoother;
  constexpr size_t maxNrLeaves = 5;

  // Loop over timesteps from 1...K-1
  for (size_t k = 1; k < K; k++) {
    if (k > 1) graph.push_back(switching.modeChain.at(k - 1));  // Mode chain
    graph.push_back(switching.binaryFactors.at(k - 1));         // Motion Model
    graph.push_back(switching.unaryFactors.at(k));              // Measurement

    initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));

    smoother.update(graph, initial, maxNrLeaves);

    // Clear all the factors from the graph
    graph.resize(0);
  }

  auto& hybridBayesNet = smoother.hybridBayesNet();
#ifdef GTSAM_DT_MERGING
  EXPECT_LONGS_EQUAL(11, hybridBayesNet.at(5)->asDiscrete()->nrValues());
#else
  EXPECT_LONGS_EQUAL(16, hybridBayesNet.at(5)->asDiscrete()->nrValues());
#endif
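  // GTSAM_DT_MERGING collapses identical leaves in the discrete decision
  // trees, so the pruned discrete conditional stores fewer distinct values
  // than the 2^n assignments kept when merging is disabled.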

  // Get the continuous delta update as well as
  // the optimal discrete assignment.
  HybridValues delta = hybridBayesNet.optimize();

  // Check discrete assignment
  DiscreteValues expected_discrete;
  for (size_t k = 0; k < K - 1; k++) {
    expected_discrete[M(k)] = discrete_seq[k];
  }
  EXPECT(assert_equal(expected_discrete, delta.discrete()));

  // Update nonlinear solution and verify
  Values result = initial.retract(delta.continuous());
  Values expected_continuous;
  for (size_t k = 0; k < K; k++) {
    expected_continuous.insert(X(k), measurements[k]);
  }
  EXPECT(assert_equal(expected_continuous, result));
}

/****************************************************************************/
// Test that the pruned Bayes net has the correct error and that no exceptions
// are thrown.
TEST(HybridSmoother, ValidPruningError) {
  using namespace estimation_fixture;

  size_t K = 8;

  // Switching example of robot moving in 1D
  // with given measurements and equal mode priors.
  HybridNonlinearFactorGraph graph;
  Values initial;
  Switching switching = InitializeEstimationProblem(
      K, 0.1, 0.1, measurements, "1/1 1/1", &graph, &initial);
  HybridSmoother smoother;

  constexpr size_t maxNrLeaves = 3;
  for (size_t k = 1; k < K; k++) {
    if (k > 1) graph.push_back(switching.modeChain.at(k - 1));  // Mode chain
    graph.push_back(switching.binaryFactors.at(k - 1));         // Motion Model
    graph.push_back(switching.unaryFactors.at(k));              // Measurement

    initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));

    smoother.update(graph, initial, maxNrLeaves);

    // Clear all the factors from the graph
    graph.resize(0);
  }

  auto& hybridBayesNet = smoother.hybridBayesNet();
#ifdef GTSAM_DT_MERGING
  EXPECT_LONGS_EQUAL(14, hybridBayesNet.at(8)->asDiscrete()->nrValues());
#else
  EXPECT_LONGS_EQUAL(128, hybridBayesNet.at(8)->asDiscrete()->nrValues());
#endif

  // Get the continuous delta update as well as
  // the optimal discrete assignment.
  HybridValues delta = smoother.hybridBayesNet().optimize();

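  // errorTree() gives the negative log posterior over discrete assignments
  // (up to a constant); even after pruning it should be well-defined and
  // close to zero at the optimal assignment found above.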
  auto errorTree = smoother.hybridBayesNet().errorTree(delta.continuous());
  EXPECT_DOUBLES_EQUAL(1e-8, errorTree(delta.discrete()), 1e-8);
}

/****************************************************************************/
// Test if dead mode removal works.
TEST(HybridSmoother, DeadModeRemoval) {
  using namespace estimation_fixture;

  size_t K = 8;

  // Switching example of robot moving in 1D
  // with given measurements and equal mode priors.
  HybridNonlinearFactorGraph graph;
  Values initial;
  Switching switching = InitializeEstimationProblem(
      K, 0.1, 0.1, measurements, "1/1 1/1", &graph, &initial);

  // Smoother with dead mode removal enabled.
  HybridSmoother smoother(true);
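  // With dead mode removal, discrete modes whose posterior has effectively
  // collapsed to a single value are treated as decided and dropped from later
  // updates, which keeps the hypothesis tree small.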
190 
191  constexpr size_t maxNrLeaves = 3;
192  for (size_t k = 1; k < K; k++) {
193  if (k > 1) graph.push_back(switching.modeChain.at(k - 1)); // Mode chain
194  graph.push_back(switching.binaryFactors.at(k - 1)); // Motion Model
195  graph.push_back(switching.unaryFactors.at(k)); // Measurement
196 
197  initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
198 
199  smoother.update(graph, initial, maxNrLeaves);
200 
201  // Clear all the factors from the graph
202  graph.resize(0);
203  }
204 
  // Get the continuous delta update as well as
  // the optimal discrete assignment.
  HybridValues delta = smoother.hybridBayesNet().optimize();

  // Check discrete assignment
  DiscreteValues expected_discrete;
  for (size_t k = 0; k < K - 1; k++) {
    expected_discrete[M(k)] = discrete_seq[k];
  }
  EXPECT(assert_equal(expected_discrete, delta.discrete()));

  // Update nonlinear solution and verify
  Values result = initial.retract(delta.continuous());
  Values expected_continuous;
  for (size_t k = 0; k < K; k++) {
    expected_continuous.insert(X(k), measurements[k]);
  }
  EXPECT(assert_equal(expected_continuous, result));
}

/* ************************************************************************* */
int main() {
  TestResult tr;
  return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */