NonlinearOptimizer.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file NonlinearOptimizer.cpp
 * @brief Convergence functions not dependent on graph types
 * @author Frank Dellaert
 */

#include <gtsam/nonlinear/NonlinearOptimizer.h>
#include <gtsam/nonlinear/internal/NonlinearOptimizerState.h>

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/GaussianEliminationTree.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/linear/SubgraphSolver.h>
#include <gtsam/linear/PCGSolver.h>

#include <gtsam/inference/Ordering.h>

#include <gtsam/base/timing.h>

#include <stdexcept>
#include <iostream>
#include <iomanip>

using namespace std;

namespace gtsam {

/* ************************************************************************* */
// NOTE(frank): unique_ptr by-value takes ownership, as discussed in
// http://stackoverflow.com/questions/8114276/
NonlinearOptimizer::NonlinearOptimizer(const NonlinearFactorGraph& graph,
                                       std::unique_ptr<internal::NonlinearOptimizerState> state)
    : graph_(graph), state_(std::move(state)) {}

/* ************************************************************************* */
NonlinearOptimizer::~NonlinearOptimizer() {}

/* ************************************************************************* */
double NonlinearOptimizer::error() const {
  return state_->error;
}

size_t NonlinearOptimizer::iterations() const {
  return state_->iterations;
}

const Values& NonlinearOptimizer::values() const {
  return state_->values;
}

/* ************************************************************************* */
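// Generic optimization driver shared by the derived optimizers: repeatedly calls iterate()
// until the error tolerance is met, the convergence test fires, the iteration limit is hit,
// or the error becomes non-finite.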
void NonlinearOptimizer::defaultOptimize() {
  const NonlinearOptimizerParams& params = _params();
  double currentError = error();

  // check if we're already close enough
  if (currentError <= params.errorTol) {
    if (params.verbosity >= NonlinearOptimizerParams::ERROR)
      cout << "Exiting, as error = " << currentError << " < " << params.errorTol << endl;
    return;
  }

  // Maybe show output
  if (params.verbosity >= NonlinearOptimizerParams::VALUES)
    values().print("Initial values");
  if (params.verbosity >= NonlinearOptimizerParams::ERROR)
    cout << "Initial error: " << currentError << endl;

  // Return if we already have too many iterations
  if (iterations() >= params.maxIterations) {
    if (params.verbosity >= NonlinearOptimizerParams::TERMINATION) {
      cout << "iterations: " << iterations() << " >? " << params.maxIterations << endl;
    }
    return;
  }

  // Iterative loop
  double newError = currentError;  // used to avoid repeated calls to error()
  do {
    // Do next iteration
    currentError = newError;
    iterate();
    tictoc_finishedIteration();

    // Update newError for either printouts or conditional-end checks:
    newError = error();

    // User hook:
    if (params.iterationHook)
      params.iterationHook(iterations(), currentError, newError);

    // Maybe show output
    if (params.verbosity >= NonlinearOptimizerParams::VALUES)
      values().print("newValues");
    if (params.verbosity >= NonlinearOptimizerParams::ERROR)
      cout << "newError: " << newError << endl;
  } while (iterations() < params.maxIterations &&
           !checkConvergence(params.relativeErrorTol, params.absoluteErrorTol, params.errorTol,
                             currentError, newError, params.verbosity) &&
           std::isfinite(currentError));

  // Printing if verbose
  if (params.verbosity >= NonlinearOptimizerParams::TERMINATION) {
    cout << "iterations: " << iterations() << " >? " << params.maxIterations << endl;
    if (iterations() >= params.maxIterations)
      cout << "Terminating because reached maximum iterations" << endl;
  }
}

/* ************************************************************************* */
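// Like defaultOptimize(), but traps any exception and returns an empty Values
// instead of propagating the error to the caller.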
const Values& NonlinearOptimizer::optimizeSafely() {
  static const Values empty;
  try {
    defaultOptimize();
    return values();
  } catch (...) {
    // uncaught exception, returning empty result
    return empty;
  }
}

/* ************************************************************************* */
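// Solve the linear system produced at the current linearization point, dispatching on the
// linear solver configured in params (multifrontal, sequential, or iterative).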
VectorValues NonlinearOptimizer::solve(const GaussianFactorGraph& gfg,
                                       const NonlinearOptimizerParams& params) const {
  // solution of linear solver is an update to the linearization point
  VectorValues delta;

  // Check which solver we are using
  if (params.isMultifrontal()) {
    // Multifrontal QR or Cholesky (decided by params.getEliminationFunction())
    if (params.ordering)
      delta = gfg.optimize(*params.ordering, params.getEliminationFunction());
    else
      delta = gfg.optimize(params.getEliminationFunction());
  } else if (params.isSequential()) {
    // Sequential QR or Cholesky (decided by params.getEliminationFunction())
    if (params.ordering)
      delta = gfg.eliminateSequential(*params.ordering, params.getEliminationFunction())
                  ->optimize();
    else
      delta = gfg.eliminateSequential(params.orderingType, params.getEliminationFunction())
                  ->optimize();
  } else if (params.isIterative()) {
    // Conjugate Gradient -> needs params.iterativeParams
    if (!params.iterativeParams)
      throw std::runtime_error(
          "NonlinearOptimizer::solve: cg parameter has to be assigned ...");

    if (auto pcg = std::dynamic_pointer_cast<PCGSolverParameters>(params.iterativeParams)) {
      delta = PCGSolver(*pcg).optimize(gfg);
    } else if (auto spcg = std::dynamic_pointer_cast<SubgraphSolverParameters>(
                   params.iterativeParams)) {
      if (!params.ordering)
        throw std::runtime_error("SubgraphSolver needs an ordering");
      delta = SubgraphSolver(gfg, *spcg, *params.ordering).optimize();
    } else {
      throw std::runtime_error(
          "NonlinearOptimizer::solve: special cg parameter type is not handled in LM solver ...");
    }
  } else {
    throw std::runtime_error("NonlinearOptimizer::solve: Optimization parameter is invalid");
  }

  // return update
  return delta;
}

/* ************************************************************************* */
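// Convergence test shared by the optimizers: succeeds when the new error is below
// errorThreshold, or when the absolute or relative error decrease falls below its tolerance;
// a negative decrease (error increased) still terminates, with a warning.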
bool checkConvergence(double relativeErrorTreshold, double absoluteErrorTreshold,
                      double errorThreshold, double currentError, double newError,
                      NonlinearOptimizerParams::Verbosity verbosity) {
  if (verbosity >= NonlinearOptimizerParams::ERROR) {
    if (newError <= errorThreshold)
      cout << "errorThreshold: " << newError << " < " << errorThreshold << endl;
    else
      cout << "errorThreshold: " << newError << " > " << errorThreshold << endl;
  }

  if (newError <= errorThreshold)
    return true;

  // check if diverges
  double absoluteDecrease = currentError - newError;
  if (verbosity >= NonlinearOptimizerParams::ERROR) {
    if (absoluteDecrease <= absoluteErrorTreshold)
      cout << "absoluteDecrease: " << setprecision(12) << absoluteDecrease << " < "
           << absoluteErrorTreshold << endl;
    else
      cout << "absoluteDecrease: " << setprecision(12) << absoluteDecrease
           << " >= " << absoluteErrorTreshold << endl;
  }

  // calculate relative error decrease and update currentError
  double relativeDecrease = absoluteDecrease / currentError;
  if (verbosity >= NonlinearOptimizerParams::ERROR) {
    if (relativeDecrease <= relativeErrorTreshold)
      cout << "relativeDecrease: " << setprecision(12) << relativeDecrease << " < "
           << relativeErrorTreshold << endl;
    else
      cout << "relativeDecrease: " << setprecision(12) << relativeDecrease
           << " >= " << relativeErrorTreshold << endl;
  }
  bool converged = (relativeErrorTreshold && (relativeDecrease <= relativeErrorTreshold)) ||
                   (absoluteDecrease <= absoluteErrorTreshold);
  if (verbosity >= NonlinearOptimizerParams::TERMINATION && converged) {
    if (absoluteDecrease >= 0.0)
      cout << "converged" << endl;
    else
      cout << "Warning: stopping nonlinear iterations because error increased" << endl;

    cout << "errorThreshold: " << newError << " <? " << errorThreshold << endl;
    cout << "absoluteDecrease: " << setprecision(12) << absoluteDecrease << " <? "
         << absoluteErrorTreshold << endl;
    cout << "relativeDecrease: " << setprecision(12) << relativeDecrease << " <? "
         << relativeErrorTreshold << endl;
  }
  return converged;
}

/* ************************************************************************* */
GTSAM_EXPORT bool checkConvergence(const NonlinearOptimizerParams& params, double currentError,
                                   double newError) {
  return checkConvergence(params.relativeErrorTol, params.absoluteErrorTol, params.errorTol,
                          currentError, newError, params.verbosity);
}
}
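
Usage note (not part of the file above): user code normally constructs one of the optimizers derived from this base class, such as gtsam::LevenbergMarquardtOptimizer, whose optimize() call drives the defaultOptimize()/iterate() loop and the checkConvergence() tolerances defined here. A minimal sketch, assuming a standard GTSAM build; the Pose2/BetweenFactor setup and the parameter values are illustrative assumptions, not taken from this file:

#include <gtsam/geometry/Pose2.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/slam/BetweenFactor.h>

int main() {
  using namespace gtsam;

  // Tiny pose graph: a prior on key 0 and one odometry factor between keys 0 and 1.
  NonlinearFactorGraph graph;
  auto priorNoise = noiseModel::Diagonal::Sigmas(Vector3(0.1, 0.1, 0.05));
  graph.addPrior(0, Pose2(0.0, 0.0, 0.0), priorNoise);
  auto odomNoise = noiseModel::Diagonal::Sigmas(Vector3(0.2, 0.2, 0.1));
  graph.emplace_shared<BetweenFactor<Pose2> >(0, 1, Pose2(1.0, 0.0, 0.0), odomNoise);

  // Deliberately perturbed initial estimate.
  Values initial;
  initial.insert(0, Pose2(0.3, 0.1, 0.1));
  initial.insert(1, Pose2(1.2, -0.2, 0.2));

  // These fields are the ones read by defaultOptimize() and checkConvergence() above.
  LevenbergMarquardtParams params;
  params.maxIterations = 20;
  params.relativeErrorTol = 1e-5;
  params.verbosity = NonlinearOptimizerParams::ERROR;  // print the error each iteration

  LevenbergMarquardtOptimizer optimizer(graph, initial, params);
  Values result = optimizer.optimize();  // runs the iterate()/checkConvergence() loop above
  result.print("result");
  return 0;
}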