elaboratePoint2KalmanFilter.cpp
Go to the documentation of this file.
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/geometry/Point2.h>
#include <gtsam/base/Vector.h>

#include <cassert>
34 using namespace std;
35 using namespace gtsam;
37 
38 int main() {
39 
40  // [code below basically does SRIF with Cholesky]
41 
42  // Create a factor graph to perform the inference
44 
45  // Create the desired ordering
47 
48  // Create a structure to hold the linearization points
49  Values linearizationPoints;
50 
51  // Ground truth example
52  // Start at origin, move to the right (x-axis): 0,0 0,1 0,2
53  // Motion model is just moving to the right (x'-x)^2
54  // Measurements are GPS like, (x-z)^2, where z is a 2D measurement
55  // i.e., we should get 0,0 0,1 0,2 if there is no noise
56 
57  // Create new state variable
58  ordering->push_back(X(0));
59 
60  // Initialize state x0 (2D point) at origin by adding a prior factor, i.e., Bayes net P(x0)
61  // This is equivalent to x_0 and P_0
62  Point2 x_initial(0,0);
63  SharedDiagonal P_initial = noiseModel::Isotropic::Sigma(2, 0.1);
64 
65  // Linearize the factor and add it to the linear factor graph
66  linearizationPoints.insert(X(0), x_initial);
67  linearFactorGraph->add(X(0),
68  P_initial->R(),
69  Vector::Zero(2),
70  noiseModel::Unit::Create(2));
71 
72  // Now predict the state at t=1, i.e. argmax_{x1} P(x1) = P(x1|x0) P(x0)
73  // In Kalman Filter notation, this is x_{t+1|t} and P_{t+1|t}
74  // For the Kalman Filter, this requires a motion model, f(x_{t}) = x_{t+1|t)
75  // Assuming the system is linear, this will be of the form f(x_{t}) = F*x_{t} + B*u_{t} + w
76  // where F is the state transition model/matrix, B is the control input model,
77  // and w is zero-mean, Gaussian white noise with covariance Q
78  // Note, in some models, Q is actually derived as G*w*G^T where w models uncertainty of some
79  // physical property, such as velocity or acceleration, and G is derived from physics
80  //
81  // For the purposes of this example, let us assume we are using a constant-position model and
82  // the controls are driving the point to the right at 1 m/s. Then, F = [1 0 ; 0 1], B = [1 0 ; 0 1]
83  // and u = [1 ; 0]. Let us also assume that the process noise Q = [0.1 0 ; 0 0.1];
84  //
85  // In the case of factor graphs, the factor related to the motion model would be defined as
86  // f2 = (f(x_{t}) - x_{t+1}) * Q^-1 * (f(x_{t}) - x_{t+1})^T
87  // Conveniently, there is a factor type, called a BetweenFactor, that can generate this factor
88  // given the expected difference, f(x_{t}) - x_{t+1}, and Q.
89  // so, difference = x_{t+1} - x_{t} = F*x_{t} + B*u_{t} - I*x_{t}
90  // = (F - I)*x_{t} + B*u_{t}
91  // = B*u_{t} (for our example)
92  ordering->push_back(X(1));
93 
94  Point2 difference(1,0);
95  SharedDiagonal Q = noiseModel::Isotropic::Sigma(2, 0.1);
96  BetweenFactor<Point2> factor2(X(0), X(1), difference, Q);
97  // Linearize the factor and add it to the linear factor graph
98  linearizationPoints.insert(X(1), x_initial);
99  linearFactorGraph->push_back(factor2.linearize(linearizationPoints));
100 
101  // We have now made the small factor graph f1-(x0)-f2-(x1)
102  // where factor f1 is just the prior from time t0, P(x0)
103  // and factor f2 is from the motion model
104  // Eliminate this in order x0, x1, to get Bayes net P(x0|x1)P(x1)
105  // As this is a filter, all we need is the posterior P(x1), so we just keep the root of the Bayes net
106  //
107  // Because of the way GTSAM works internally, we have used nonlinear class even though this example
108  // system is linear. We first convert the nonlinear factor graph into a linear one, using the specified
109  // ordering. Linear factors are simply numbered, and are not accessible via named key like the nonlinear
110  // variables. Also, the nonlinear factors are linearized around an initial estimate. For a true linear
111  // system, the initial estimate is not important.
112 
113  // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x0,x1) = P(x0|x1)*P(x1) )
114  GaussianBayesNet::shared_ptr bayesNet = linearFactorGraph->eliminateSequential(*ordering);
115  const GaussianConditional::shared_ptr& x1Conditional = bayesNet->back(); // This should be P(x1)
116 
117  // Extract the current estimate of x1,P1 from the Bayes Network
118  VectorValues result = bayesNet->optimize();
119  Point2 x1_predict = linearizationPoints.at<Point2>(X(1)) + result[X(1)];
120  traits<Point2>::Print(x1_predict, "X1 Predict");
121 
122  // Update the new linearization point to the new estimate
123  linearizationPoints.update(X(1), x1_predict);
124 
125 
126 
127  // Create a new, empty graph and add the prior from the previous step
128  linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph);
129 
130  // Convert the root conditional, P(x1) in this case, into a Prior for the next step
131  // Some care must be done here, as the linearization point in future steps will be different
132  // than what was used when the factor was created.
133  // f = || F*dx1' - (F*x0 - x1) ||^2, originally linearized at x1 = x0
134  // After this step, the factor needs to be linearized around x1 = x1_predict
135  // This changes the factor to f = || F*dx1'' - b'' ||^2
136  // = || F*(dx1' - (dx1' - dx1'')) - b'' ||^2
137  // = || F*dx1' - F*(dx1' - dx1'') - b'' ||^2
138  // = || F*dx1' - (b'' + F(dx1' - dx1'')) ||^2
139  // -> b' = b'' + F(dx1' - dx1'')
140  // -> b'' = b' - F(dx1' - dx1'')
141  // = || F*dx1'' - (b' - F(dx1' - dx1'')) ||^2
142  // = || F*dx1'' - (b' - F(x_predict - x_inital)) ||^2
143 
144  // Create a new, empty graph and add the new prior
145  linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph);
146  linearFactorGraph->add(
147  X(1),
148  x1Conditional->R(),
149  x1Conditional->d() - x1Conditional->R() * result[X(1)],
150  x1Conditional->get_model());
151 
152  // Reset ordering for the next step
154  ordering->push_back(X(1));
155 
156  // Now, a measurement, z1, has been received, and the Kalman Filter should be "Updated"/"Corrected"
157  // This is equivalent to saying P(x1|z1) ~ P(z1|x1)*P(x1) ~ f3(x1)*f4(x1;z1)
158  // where f3 is the prior from the previous step, and
159  // where f4 is a measurement factor
160  //
161  // So, now we need to create the measurement factor, f4
162  // For the Kalman Filter, this is the measurement function, h(x_{t}) = z_{t}
163  // Assuming the system is linear, this will be of the form h(x_{t}) = H*x_{t} + v
164  // where H is the observation model/matrix, and v is zero-mean, Gaussian white noise with covariance R
165  //
166  // For the purposes of this example, let us assume we have something like a GPS that returns
167  // the current position of the robot. For this simple example, we can use a PriorFactor to model the
168  // observation as it depends on only a single state variable, x1. To model real sensor observations
169  // generally requires the creation of a new factor type. For example, factors for range sensors, bearing
170  // sensors, and camera projections have already been added to GTSAM.
171  //
172  // In the case of factor graphs, the factor related to the measurements would be defined as
173  // f4 = (h(x_{t}) - z_{t}) * R^-1 * (h(x_{t}) - z_{t})^T
174  // = (x_{t} - z_{t}) * R^-1 * (x_{t} - z_{t})^T
175  // This can be modeled using the PriorFactor, where the mean is z_{t} and the covariance is R.
176  Point2 z1(1.0, 0.0);
177  SharedDiagonal R1 = noiseModel::Isotropic::Sigma(2, 0.25);
178  PriorFactor<Point2> factor4(X(1), z1, R1);
179  // Linearize the factor and add it to the linear factor graph
180  linearFactorGraph->push_back(factor4.linearize(linearizationPoints));
181 
182  // We have now made the small factor graph f3-(x1)-f4
183  // where factor f3 is the prior from previous time ( P(x1) )
184  // and factor f4 is from the measurement, z1 ( P(x1|z1) )
185  // Eliminate this in order x1, to get Bayes net P(x1)
186  // As this is a filter, all we need is the posterior P(x1), so we just keep the root of the Bayes net
187  // We solve as before...
188 
189  // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x0,x1) = P(x0|x1)*P(x1) )
190  GaussianBayesNet::shared_ptr updatedBayesNet = linearFactorGraph->eliminateSequential(*ordering);
191  const GaussianConditional::shared_ptr& updatedConditional = updatedBayesNet->back();
192 
193  // Extract the current estimate of x1 from the Bayes Network
194  VectorValues updatedResult = updatedBayesNet->optimize();
195  Point2 x1_update = linearizationPoints.at<Point2>(X(1)) + updatedResult[X(1)];
196  traits<Point2>::Print(x1_update, "X1 Update");
197 
198  // Update the linearization point to the new estimate
199  linearizationPoints.update(X(1), x1_update);
200 
201 
202 
203 
204 
205 
206  // Wash, rinse, repeat for another time step
207  // Create a new, empty graph and add the prior from the previous step
208  linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph);
209 
210  // Convert the root conditional, P(x1) in this case, into a Prior for the next step
211  // The linearization point of this prior must be moved to the new estimate of x, and the key/index needs to be reset to 0,
212  // the first key in the next iteration
213  linearFactorGraph->add(
214  X(1),
215  updatedConditional->R(),
216  updatedConditional->d() - updatedConditional->R() * updatedResult[X(1)],
217  updatedConditional->get_model());
218 
219  // Create the desired ordering
221  ordering->push_back(X(1));
222  ordering->push_back(X(2));
223 
224  // Create a nonlinear factor describing the motion model (moving right again)
225  Point2 difference2(1,0);
226  SharedDiagonal Q2 = noiseModel::Isotropic::Sigma(2, 0.1);
227  BetweenFactor<Point2> factor6(X(1), X(2), difference2, Q2);
228 
229  // Linearize the factor and add it to the linear factor graph
230  linearizationPoints.insert(X(2), x1_update);
231  linearFactorGraph->push_back(factor6.linearize(linearizationPoints));
232 
233  // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x1,x2) = P(x1|x2)*P(x2) )
234  GaussianBayesNet::shared_ptr predictionBayesNet2 = linearFactorGraph->eliminateSequential(*ordering);
235  const GaussianConditional::shared_ptr& x2Conditional = predictionBayesNet2->back();
236 
237  // Extract the predicted state
238  VectorValues prediction2Result = predictionBayesNet2->optimize();
239  Point2 x2_predict = linearizationPoints.at<Point2>(X(2)) + prediction2Result[X(2)];
240  traits<Point2>::Print(x2_predict, "X2 Predict");
241 
242  // Update the linearization point to the new estimate
243  linearizationPoints.update(X(2), x2_predict);
244 
245  // Now add the next measurement
246  // Create a new, empty graph and add the prior from the previous step
247  linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph);
248 
249  // Convert the root conditional, P(x1) in this case, into a Prior for the next step
250  linearFactorGraph->add(
251  X(2),
252  x2Conditional->R(),
253  x2Conditional->d() - x2Conditional->R() * prediction2Result[X(2)],
254  x2Conditional->get_model());
255 
256  // Create the desired ordering
258  ordering->push_back(X(2));
259 
260  // And update using z2 ...
261  Point2 z2(2.0, 0.0);
262  SharedDiagonal R2 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.25, 0.25).finished());
263  PriorFactor<Point2> factor8(X(2), z2, R2);
264 
265  // Linearize the factor and add it to the linear factor graph
266  linearFactorGraph->push_back(factor8.linearize(linearizationPoints));
267 
268  // We have now made the small factor graph f7-(x2)-f8
269  // where factor f7 is the prior from previous time ( P(x2) )
270  // and factor f8 is from the measurement, z2 ( P(x2|z2) )
271  // Eliminate this in order x2, to get Bayes net P(x2)
272  // As this is a filter, all we need is the posterior P(x2), so we just keep the root of the Bayes net
273  // We solve as before...
274 
275  // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x0,x1) = P(x0|x1)*P(x1) )
276  GaussianBayesNet::shared_ptr updatedBayesNet2 = linearFactorGraph->eliminateSequential(*ordering);
277  const GaussianConditional::shared_ptr& updatedConditional2 = updatedBayesNet2->back();
278 
279  // Extract the current estimate of x2 from the Bayes Network
280  VectorValues updatedResult2 = updatedBayesNet2->optimize();
281  Point2 x2_update = linearizationPoints.at<Point2>(X(2)) + updatedResult2[X(2)];
282  traits<Point2>::Print(x2_update, "X2 Update");
283 
284  // Update the linearization point to the new estimate
285  linearizationPoints.update(X(2), x2_update);
286 
287 
288 
289 
290 
291 
292  // Wash, rinse, repeat for a third time step
293  // Create a new, empty graph and add the prior from the previous step
294  linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph);
295 
296  // Convert the root conditional, P(x1) in this case, into a Prior for the next step
297  linearFactorGraph->add(
298  X(2),
299  updatedConditional2->R(),
300  updatedConditional2->d() - updatedConditional2->R() * updatedResult2[X(2)],
301  updatedConditional2->get_model());
302 
303  // Create the desired ordering
305  ordering->push_back(X(2));
306  ordering->push_back(X(3));
307 
308  // Create a nonlinear factor describing the motion model
309  Point2 difference3(1,0);
310  SharedDiagonal Q3 = noiseModel::Isotropic::Sigma(2, 0.1);
311  BetweenFactor<Point2> factor10(X(2), X(3), difference3, Q3);
312 
313  // Linearize the factor and add it to the linear factor graph
314  linearizationPoints.insert(X(3), x2_update);
315  linearFactorGraph->push_back(factor10.linearize(linearizationPoints));
316 
317  // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x1,x2) = P(x1|x2)*P(x2) )
318  GaussianBayesNet::shared_ptr predictionBayesNet3 = linearFactorGraph->eliminateSequential(*ordering);
319  const GaussianConditional::shared_ptr& x3Conditional = predictionBayesNet3->back();
320 
321  // Extract the current estimate of x3 from the Bayes Network
322  VectorValues prediction3Result = predictionBayesNet3->optimize();
323  Point2 x3_predict = linearizationPoints.at<Point2>(X(3)) + prediction3Result[X(3)];
324  traits<Point2>::Print(x3_predict, "X3 Predict");
325 
326  // Update the linearization point to the new estimate
327  linearizationPoints.update(X(3), x3_predict);
328 
329 
330 
331  // Now add the next measurement
332  // Create a new, empty graph and add the prior from the previous step
333  linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph);
334 
335  // Convert the root conditional, P(x1) in this case, into a Prior for the next step
336  linearFactorGraph->add(
337  X(3),
338  x3Conditional->R(),
339  x3Conditional->d() - x3Conditional->R() * prediction3Result[X(3)],
340  x3Conditional->get_model());
341 
342  // Create the desired ordering
344  ordering->push_back(X(3));
345 
346  // And update using z3 ...
347  Point2 z3(3.0, 0.0);
348  SharedDiagonal R3 = noiseModel::Isotropic::Sigma(2, 0.25);
349  PriorFactor<Point2> factor12(X(3), z3, R3);
350 
351  // Linearize the factor and add it to the linear factor graph
352  linearFactorGraph->push_back(factor12.linearize(linearizationPoints));
353 
354  // We have now made the small factor graph f11-(x3)-f12
355  // where factor f11 is the prior from previous time ( P(x3) )
356  // and factor f12 is from the measurement, z3 ( P(x3|z3) )
357  // Eliminate this in order x3, to get Bayes net P(x3)
358  // As this is a filter, all we need is the posterior P(x3), so we just keep the root of the Bayes net
359  // We solve as before...
360 
361  // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x0,x1) = P(x0|x1)*P(x1) )
362  GaussianBayesNet::shared_ptr updatedBayesNet3 = linearFactorGraph->eliminateSequential(*ordering);
363  const GaussianConditional::shared_ptr& updatedConditional3 = updatedBayesNet3->back();
364 
365  // Extract the current estimate of x2 from the Bayes Network
366  VectorValues updatedResult3 = updatedBayesNet3->optimize();
367  Point2 x3_update = linearizationPoints.at<Point2>(X(3)) + updatedResult3[X(3)];
368  traits<Point2>::Print(x3_update, "X3 Update");
369 
370  // Update the linearization point to the new estimate
371  linearizationPoints.update(X(3), x3_update);
372 
373 
374 
375  return 0;
376 }
GaussianFactorGraph.h
Linear Factor Graph where all factors are Gaussians.
gtsam::GaussianFactorGraph::shared_ptr
std::shared_ptr< This > shared_ptr
shared_ptr to this class
Definition: GaussianFactorGraph.h:82
asia::bayesNet
static const DiscreteBayesNet bayesNet
Definition: testDiscreteSearch.cpp:30
Vector.h
typedef and functions to augment Eigen's VectorXd
simple_graph::factor2
auto factor2
Definition: testJacobianFactor.cpp:203
gtsam::Vector2
Eigen::Vector2d Vector2
Definition: Vector.h:43
z1
Point2 z1
Definition: testTriangulationFactor.cpp:45
gtsam::PriorFactor
Definition: nonlinear/PriorFactor.h:30
simple::R3
static Rot3 R3
Definition: testInitializePose3.cpp:54
X
#define X
Definition: icosphere.cpp:20
GaussianBayesNet.h
Chordal Bayes Net, the result of eliminating a factor graph.
gtsam::Values::update
void update(Key j, const Value &val)
Definition: Values.cpp:170
result
Values result
Definition: OdometryOptimize.cpp:8
gtsam::Ordering::shared_ptr
std::shared_ptr< This > shared_ptr
shared_ptr to this class
Definition: inference/Ordering.h:45
Point2.h
2D Point
so3::R1
SO3 R1
Definition: testShonanFactor.cpp:41
gtsam::NoiseModelFactor::linearize
std::shared_ptr< GaussianFactor > linearize(const Values &x) const override
Definition: NonlinearFactor.cpp:152
gtsam::GaussianFactorGraph
Definition: GaussianFactorGraph.h:73
gtsam::Values::at
const ValueType at(Key j) const
Definition: Values-inl.h:261
PriorFactor.h
z3
static const Unit3 z3
Definition: testRotateFactor.cpp:47
gtsam::VectorValues
Definition: VectorValues.h:74
BetweenFactor.h
gtsam::SharedDiagonal
noiseModel::Diagonal::shared_ptr SharedDiagonal
Definition: NoiseModel.h:764
Symbol.h
gtsam::Point2
Vector2 Point2
Definition: Point2.h:32
ordering
static enum @1096 ordering
gtsam::FactorGraph::back
sharedFactor back() const
Definition: FactorGraph.h:348
Q2
static double Q2[8]
Definition: ndtri.c:122
Eigen::Quaternion
The quaternion class used to represent 3D orientations and rotations.
Definition: ForwardDeclarations.h:293
so3::R2
SO3 R2
Definition: testShonanFactor.cpp:43
gtsam
traits
Definition: SFMdata.h:40
NoiseModel.h
gtsam::Values
Definition: Values.h:65
std
Definition: BFloat16.h:88
gtsam::Values::insert
void insert(Key j, const Value &val)
Definition: Values.cpp:156
gtsam::GaussianConditional::shared_ptr
std::shared_ptr< This > shared_ptr
shared_ptr to this class
Definition: GaussianConditional.h:46
gtsam::Print
void Print(const CONTAINER &keys, const string &s, const KeyFormatter &keyFormatter)
Definition: Key.cpp:65
gtsam::Ordering
Definition: inference/Ordering.h:33
gtsam::GaussianBayesNet::shared_ptr
std::shared_ptr< This > shared_ptr
Definition: GaussianBayesNet.h:42
z2
static const Unit3 z2
Definition: testRotateFactor.cpp:46
gtsam::BetweenFactor
Definition: BetweenFactor.h:40
main
int main()
Definition: elaboratePoint2KalmanFilter.cpp:38


gtsam
Author(s):
autogenerated on Wed Mar 19 2025 03:01:38