BatchFixedLagSmoother.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file  BatchFixedLagSmoother.cpp
 * @brief An LM-based fixed-lag smoother.
 */

// NOTE: include paths assume the gtsam/nonlinear location of the fixed-lag
// smoothers; older releases keep these headers under gtsam_unstable/nonlinear.
#include <gtsam/nonlinear/BatchFixedLagSmoother.h>
#include <gtsam/nonlinear/LinearContainerFactor.h>
#include <gtsam/linear/GaussianJunctionTree.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/GaussianFactor.h>

using namespace std;

namespace gtsam {

/* ************************************************************************* */
void BatchFixedLagSmoother::print(const string& s,
    const KeyFormatter& keyFormatter) const {
  FixedLagSmoother::print(s, keyFormatter);
  // TODO: What else to print?
}

/* ************************************************************************* */
bool BatchFixedLagSmoother::equals(const FixedLagSmoother& rhs,
    double tol) const {
  const BatchFixedLagSmoother* e =
      dynamic_cast<const BatchFixedLagSmoother*>(&rhs);
  return e != nullptr && FixedLagSmoother::equals(*e, tol)
      && factors_.equals(e->factors_, tol) && theta_.equals(e->theta_, tol);
}

/* ************************************************************************* */
Matrix BatchFixedLagSmoother::marginalCovariance(Key key) const {
  throw runtime_error(
      "BatchFixedLagSmoother::marginalCovariance not implemented");
}

/* ************************************************************************* */
FixedLagSmoother::Result BatchFixedLagSmoother::update(
    const NonlinearFactorGraph& newFactors, const Values& newTheta,
    const KeyTimestampMap& timestamps, const FactorIndices& factorsToRemove) {

  // Update all of the internal variables with the new information
  gttic(augment_system);
  // Add the new variables to theta
  theta_.insert(newTheta);
  // Add new variables to the end of the ordering
  for (const auto key : newTheta.keys()) {
    ordering_.push_back(key);
  }
  // Augment Delta
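  // (delta_ stores the tangent-space update relative to the linearization
  //  point theta_, so newly added variables start from a zero vector.)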
  delta_.insert(newTheta.zeroVectors());

  // Add the new factors to the graph, updating the variable index
  insertFactors(newFactors);
  gttoc(augment_system);

  // Remove any factors listed in factorsToRemove
  for (const size_t i : factorsToRemove) {
    if (factors_[i])
      factors_[i].reset();
  }

  // Update the timestamps associated with the factor keys
  updateKeyTimestampMap(timestamps);

  // Get current timestamp
  double current_timestamp = getCurrentTimestamp();

  // Find the set of variables to be marginalized out
  KeyVector marginalizableKeys = findKeysBefore(
      current_timestamp - smootherLag_);

  // Reorder
  gttic(reorder);
  reorder(marginalizableKeys);
  gttoc(reorder);

  // Optimize
  gttic(optimize);
  Result result;
  if (factors_.size() > 0) {
    result = optimize();
  }
  gttoc(optimize);

  // Marginalize out old variables.
  gttic(marginalize);
  if (marginalizableKeys.size() > 0) {
    marginalize(marginalizableKeys);
  }
  gttoc(marginalize);

  return result;
}
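// A minimal usage sketch (illustrative only; the lag, keys, noise model, and
// value types below are assumptions, not taken from this file). A caller
// batches new factors, new values, and per-key timestamps into each update()
// call, and the smoother marginalizes out anything older than the lag:
//
//   BatchFixedLagSmoother smoother(2.0);  // keep a 2.0-second window
//   NonlinearFactorGraph newFactors;
//   Values newValues;
//   FixedLagSmoother::KeyTimestampMap newTimestamps;
//
//   newFactors.addPrior(Symbol('x', 0), Pose2(0, 0, 0),
//                       noiseModel::Isotropic::Sigma(3, 0.1));
//   newValues.insert(Symbol('x', 0), Pose2(0, 0, 0));
//   newTimestamps[Symbol('x', 0)] = 0.0;
//
//   smoother.update(newFactors, newValues, newTimestamps);
//   Pose2 current = smoother.calculateEstimate<Pose2>(Symbol('x', 0));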

/* ************************************************************************* */
void BatchFixedLagSmoother::insertFactors(
    const NonlinearFactorGraph& newFactors) {
  for (const auto& factor : newFactors) {
    Key index;
    // Insert the factor into an existing hole in the factor graph, if possible
    if (availableSlots_.size() > 0) {
      index = availableSlots_.front();
      availableSlots_.pop();
      factors_.replace(index, factor);
    } else {
      index = factors_.size();
      factors_.push_back(factor);
    }
    // Update the FactorIndex
    for (Key key : *factor) {
      factorIndex_[key].insert(index);
    }
  }
}

/* ************************************************************************* */
void BatchFixedLagSmoother::removeFactors(
    const set<size_t>& deleteFactors) {
  for (size_t slot : deleteFactors) {
    if (factors_.at(slot)) {
      // Remove references to this factor from the FactorIndex
      for (Key key : *(factors_.at(slot))) {
        factorIndex_[key].erase(slot);
      }
      // Remove the factor from the factor graph
      factors_.remove(slot);
      // Add the factor's old slot to the list of available slots
      availableSlots_.push(slot);
    } else {
      // TODO: Throw an error??
      cout << "Attempting to remove a factor from slot " << slot
           << ", but it is already nullptr." << endl;
    }
  }
}

/* ************************************************************************* */
void BatchFixedLagSmoother::eraseKeys(const KeyVector& keys) {

  for (Key key : keys) {
    // Erase the key from the values
    theta_.erase(key);

    // Erase the key from the factor index
    factorIndex_.erase(key);

    // Erase the key from the set of linearized keys
    if (linearValues_.exists(key)) {
      linearValues_.erase(key);
    }
  }

  eraseKeyTimestampMap(keys);

  // Remove marginalized keys from the ordering and delta
  for (Key key : keys) {
    ordering_.erase(find(ordering_.begin(), ordering_.end(), key));
    delta_.erase(key);
  }
}

/* ************************************************************************* */
void BatchFixedLagSmoother::reorder(const KeyVector& marginalizeKeys) {
  // COLAMD groups will be used to place marginalize keys in Group 0, and everything else in Group 1
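  // (Ordering::ColamdConstrainedFirst computes a fill-reducing COLAMD ordering
  //  subject to the constraint that the given keys come first, so the variables
  //  about to be marginalized are eliminated before everything else.)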
  ordering_ = Ordering::ColamdConstrainedFirst(factors_, marginalizeKeys);
}

/* ************************************************************************* */
FixedLagSmoother::Result BatchFixedLagSmoother::optimize() {

  // Create output result structure
  Result result;
  result.nonlinearVariables = theta_.size() - linearValues_.size();
  result.linearVariables = linearValues_.size();

  // Set optimization parameters
  double lambda = parameters_.lambdaInitial;
  double lambdaFactor = parameters_.lambdaFactor;
  double lambdaUpperBound = parameters_.lambdaUpperBound;
  double lambdaLowerBound = 1.0e-10;
  size_t maxIterations = parameters_.maxIterations;
  double relativeErrorTol = parameters_.relativeErrorTol;
  double absoluteErrorTol = parameters_.absoluteErrorTol;
  double errorTol = parameters_.errorTol;

  // Create a Values that holds the current evaluation point
  Values evalpoint = theta_.retract(delta_);
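  // (retract() applies the tangent-space update delta_ on top of the values in
  //  theta_, yielding the current best estimate.)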
  result.error = factors_.error(evalpoint);

  // Check if we're already close enough
  if (result.error <= errorTol) {
    return result;
  }

  // Use a custom optimization loop so the linearization points can be controlled
  double previousError;
  VectorValues newDelta;
  do {
    previousError = result.error;

    // Do next iteration
    gttic(optimizer_iteration);
    {
      // Linearize graph around the linearization point
      GaussianFactorGraph linearFactorGraph = *factors_.linearize(theta_);

      // Keep increasing lambda until we make progress
      while (true) {

        // Add prior factors at the current solution
        gttic(damp);
        GaussianFactorGraph dampedFactorGraph(linearFactorGraph);
        dampedFactorGraph.reserve(linearFactorGraph.size() + delta_.size());
        {
          // for each of the variables, add a prior at the current solution
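          // (An isotropic prior with sigma = 1/sqrt(lambda) on a variable adds
          //  lambda * I to that variable's block of the information matrix,
          //  i.e. standard Levenberg-Marquardt damping that pulls the new
          //  solution toward the current delta_.)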
          double sigma = 1.0 / sqrt(lambda);
          for (const auto& key_value : delta_) {
            size_t dim = key_value.second.size();
            Matrix A = Matrix::Identity(dim, dim);
            Vector b = key_value.second;
            SharedDiagonal model = noiseModel::Isotropic::Sigma(dim, sigma);
            GaussianFactor::shared_ptr prior(
                new JacobianFactor(key_value.first, A, b, model));
            dampedFactorGraph.push_back(prior);
          }
        }
        gttoc(damp);
        result.intermediateSteps++;

        gttic(solve);
        // Solve the damped Gaussian factor graph
        newDelta = dampedFactorGraph.optimize(ordering_,
            parameters_.getEliminationFunction());
        // Update the evalpoint with the new delta
        evalpoint = theta_.retract(newDelta);
        gttoc(solve);

        // Evaluate the new error
        gttic(compute_error);
        double error = factors_.error(evalpoint);
        gttoc(compute_error);

        if (error < result.error) {
          // Keep this change
          // Update the error value
          result.error = error;
          // Update the linearization point
          theta_ = evalpoint;
          // Reset the deltas to zeros
          delta_.setZero();
          // Put the linearization points and deltas back for specific variables
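          // (linearValues_ holds the variables whose linearization point must
          //  stay fixed because marginal factors were created there; restore
          //  their theta entries and keep their full deltas.)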
          if (enforceConsistency_ && (linearValues_.size() > 0)) {
            theta_.update(linearValues_);
            for (const auto key : linearValues_.keys()) {
              delta_.at(key) = newDelta.at(key);
            }
          }
          // Decrease lambda for next time
          lambda /= lambdaFactor;
          if (lambda < lambdaLowerBound) {
            lambda = lambdaLowerBound;
          }
          // End this lambda search iteration
          break;
        } else {
          // Reject this change
          if (lambda >= lambdaUpperBound) {
            // The maximum lambda has been used. Print a warning and end the search.
            if (parameters_.verbosity >= NonlinearOptimizerParams::TERMINATION
                || parameters_.verbosityLM == LevenbergMarquardtParams::SUMMARY) {
              cout
                  << "Warning: Levenberg-Marquardt giving up because cannot decrease error with maximum lambda"
                  << endl;
            }
            break;
          } else {
            // Increase lambda and continue searching
            lambda *= lambdaFactor;
          }
        }
      } // end while
    }
    gttoc(optimizer_iteration);

    result.iterations++;
  } while (result.iterations < maxIterations
      && !checkConvergence(relativeErrorTol, absoluteErrorTol, errorTol,
          previousError, result.error, NonlinearOptimizerParams::SILENT));

  return result;
}

/* ************************************************************************* */
void BatchFixedLagSmoother::marginalize(const KeyVector& marginalizeKeys) {
  // In order to marginalize out the selected variables, the factors involving those
  // variables must be identified and removed. Also, the effect of those removed
  // factors on the remaining variables needs to be accounted for. This is done with
  // linear container factors built from the result of a partial elimination. This
  // function removes the marginalized factors and adds the linearized factors back in.

  // Identify all of the factors involving any marginalized variable. These must be removed.
  set<size_t> removedFactorSlots;
  const VariableIndex variableIndex(factors_);
  for (Key key : marginalizeKeys) {
    const auto& slots = variableIndex[key];
    removedFactorSlots.insert(slots.begin(), slots.end());
  }

  // Add the removed factors to a factor graph
  NonlinearFactorGraph removedFactors;
  for (size_t slot : removedFactorSlots) {
    if (factors_.at(slot)) {
      removedFactors.push_back(factors_.at(slot));
    }
  }

  // Calculate marginal factors on the remaining keys
  NonlinearFactorGraph marginalFactors = CalculateMarginalFactors(
      removedFactors, theta_, marginalizeKeys, parameters_.getEliminationFunction());

  // Remove marginalized factors from the factor graph
  removeFactors(removedFactorSlots);

  // Remove marginalized keys from the system
  eraseKeys(marginalizeKeys);

  // Insert the new marginal factors
  insertFactors(marginalFactors);
}

/* ************************************************************************* */
void BatchFixedLagSmoother::PrintKeySet(const set<Key>& keys,
    const string& label) {
  cout << label;
  for (Key key : keys) {
    cout << " " << DefaultKeyFormatter(key);
  }
  cout << endl;
}

/* ************************************************************************* */
void BatchFixedLagSmoother::PrintKeySet(const gtsam::KeySet& keys,
    const string& label) {
  cout << label;
  for (Key key : keys) {
    cout << " " << DefaultKeyFormatter(key);
  }
  cout << endl;
}

/* ************************************************************************* */
void BatchFixedLagSmoother::PrintSymbolicFactor(
    const NonlinearFactor::shared_ptr& factor) {
  cout << "f(";
  if (factor) {
    for (Key key : factor->keys()) {
      cout << " " << DefaultKeyFormatter(key);
    }
  } else {
    cout << " nullptr";
  }
  cout << " )" << endl;
}

/* ************************************************************************* */
void BatchFixedLagSmoother::PrintSymbolicFactor(
    const GaussianFactor::shared_ptr& factor) {
  cout << "f(";
  for (Key key : factor->keys()) {
    cout << " " << DefaultKeyFormatter(key);
  }
  cout << " )" << endl;
}

/* ************************************************************************* */
void BatchFixedLagSmoother::PrintSymbolicGraph(
    const NonlinearFactorGraph& graph, const string& label) {
  cout << label << endl;
  for (const auto& factor : graph) {
    PrintSymbolicFactor(factor);
  }
}

/* ************************************************************************* */
void BatchFixedLagSmoother::PrintSymbolicGraph(const GaussianFactorGraph& graph,
    const string& label) {
  cout << label << endl;
  for (const auto& factor : graph) {
    PrintSymbolicFactor(factor);
  }
}

/* ************************************************************************* */
GaussianFactorGraph BatchFixedLagSmoother::CalculateMarginalFactors(
    const GaussianFactorGraph& graph, const KeyVector& keys,
    const GaussianFactorGraph::Eliminate& eliminateFunction) {
  if (keys.size() == 0) {
    // There are no keys to marginalize. Simply return the input factors
    return graph;
  } else {
    // .first is the eliminated Bayes tree, while .second is the remaining factor graph
    return *graph.eliminatePartialMultifrontal(keys, eliminateFunction).second;
  }
}

/* ************************************************************************* */
NonlinearFactorGraph BatchFixedLagSmoother::CalculateMarginalFactors(
    const NonlinearFactorGraph& graph, const Values& theta, const KeyVector& keys,
    const GaussianFactorGraph::Eliminate& eliminateFunction) {
  if (keys.size() == 0) {
    // There are no keys to marginalize. Simply return the input factors
    return graph;
  } else {
    // Create the linear factor graph
    const auto linearFactorGraph = graph.linearize(theta);

    const auto marginalLinearFactors =
        CalculateMarginalFactors(*linearFactorGraph, keys, eliminateFunction);

    // Wrap in nonlinear container factors
    return LinearContainerFactor::ConvertLinearGraph(marginalLinearFactors, theta);
  }
}

/* ************************************************************************* */
} // namespace gtsam