NonlinearOptimizer.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file NonlinearOptimizer.cpp
 * @brief Convergence functions not dependent on graph types
 * @author Frank Dellaert
 */

#include <gtsam/nonlinear/NonlinearOptimizer.h>
#include <gtsam/nonlinear/internal/NonlinearOptimizerState.h>

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/linear/SubgraphSolver.h>
#include <gtsam/linear/PCGSolver.h>
#include <gtsam/inference/Ordering.h>
#include <gtsam/base/timing.h>

#include <boost/algorithm/string.hpp>
#include <boost/shared_ptr.hpp>

#include <stdexcept>
#include <iostream>
#include <iomanip>

using namespace std;

namespace gtsam {

/* ************************************************************************* */
// NOTE(frank): unique_ptr by-value takes ownership, as discussed in
// http://stackoverflow.com/questions/8114276/
NonlinearOptimizer::NonlinearOptimizer(const NonlinearFactorGraph& graph,
                                       std::unique_ptr<internal::NonlinearOptimizerState> state)
    : graph_(graph), state_(std::move(state)) {}

/* ************************************************************************* */
NonlinearOptimizer::~NonlinearOptimizer() {}

/* ************************************************************************* */
double NonlinearOptimizer::error() const {
  return state_->error;
}

size_t NonlinearOptimizer::iterations() const {
  return state_->iterations;
}

const Values& NonlinearOptimizer::values() const {
  return state_->values;
}

/* ************************************************************************* */
void NonlinearOptimizer::defaultOptimize() {
  const NonlinearOptimizerParams& params = _params();
  double currentError = error();

  // check if we're already close enough
  if (currentError <= params.errorTol) {
    if (params.verbosity >= NonlinearOptimizerParams::ERROR)
      cout << "Exiting, as error = " << currentError << " < " << params.errorTol << endl;
    return;
  }

  // Maybe show output
  if (params.verbosity >= NonlinearOptimizerParams::VALUES)
    values().print("Initial values");
  if (params.verbosity >= NonlinearOptimizerParams::ERROR)
    cout << "Initial error: " << currentError << endl;

  // Return if we already have too many iterations
  if (iterations() >= params.maxIterations) {
    if (params.verbosity >= NonlinearOptimizerParams::TERMINATION) {
      cout << "iterations: " << iterations() << " >? " << params.maxIterations << endl;
    }
    return;
  }

  // Iterative loop
  double newError = currentError;  // used to avoid repeated calls to error()
  do {
    // Do next iteration
    currentError = newError;
    iterate();
    tictoc_finishedIteration();

    // Update newError for either printouts or conditional-end checks:
    newError = error();

    // User hook:
    if (params.iterationHook)
      params.iterationHook(iterations(), currentError, newError);

    // Maybe show output
    if (params.verbosity >= NonlinearOptimizerParams::VALUES)
      values().print("newValues");
    if (params.verbosity >= NonlinearOptimizerParams::ERROR)
      cout << "newError: " << newError << endl;
  } while (iterations() < params.maxIterations &&
           !checkConvergence(params.relativeErrorTol, params.absoluteErrorTol, params.errorTol,
                             currentError, newError, params.verbosity) &&
           std::isfinite(currentError));

  // Printing if verbose
  if (params.verbosity >= NonlinearOptimizerParams::TERMINATION) {
    cout << "iterations: " << iterations() << " >? " << params.maxIterations << endl;
    if (iterations() >= params.maxIterations)
      cout << "Terminating because reached maximum iterations" << endl;
  }
}

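// Usage sketch (illustrative): defaultOptimize() is the common iteration loop that
// the concrete optimizers run when optimize() is called. Assuming `graph` and
// `initialEstimate` are an already-built NonlinearFactorGraph and Values (both
// hypothetical names here), the loop is typically driven through a derived class
// such as LevenbergMarquardtOptimizer:
//
//   LevenbergMarquardtParams lmParams;
//   lmParams.maxIterations = 100;      // bounds the do/while loop above
//   lmParams.relativeErrorTol = 1e-5;  // consumed by checkConvergence() below
//   Values result =
//       LevenbergMarquardtOptimizer(graph, initialEstimate, lmParams).optimize();
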
/* ************************************************************************* */
const Values& NonlinearOptimizer::optimizeSafely() {
  static const Values empty;
  try {
    defaultOptimize();
    return values();
  } catch (...) {
    // uncaught exception, returning empty result
    return empty;
  }
}

/* ************************************************************************* */
VectorValues NonlinearOptimizer::solve(const GaussianFactorGraph& gfg,
                                       const NonlinearOptimizerParams& params) const {
  // solution of linear solver is an update to the linearization point
  VectorValues delta;

  // Check which solver we are using
  if (params.isMultifrontal()) {
    // Multifrontal QR or Cholesky (decided by params.getEliminationFunction())
    if (params.ordering)
      delta = gfg.optimize(*params.ordering, params.getEliminationFunction());
    else
      delta = gfg.optimize(params.getEliminationFunction());
  } else if (params.isSequential()) {
    // Sequential QR or Cholesky (decided by params.getEliminationFunction())
    if (params.ordering)
      delta = gfg.eliminateSequential(*params.ordering, params.getEliminationFunction(),
                                      boost::none, params.orderingType)->optimize();
    else
      delta = gfg.eliminateSequential(params.getEliminationFunction(), boost::none,
                                      params.orderingType)->optimize();
  } else if (params.isIterative()) {
    // Conjugate Gradient -> needs params.iterativeParams
    if (!params.iterativeParams)
      throw std::runtime_error(
          "NonlinearOptimizer::solve: cg parameter has to be assigned ...");

    if (auto pcg = boost::dynamic_pointer_cast<PCGSolverParameters>(params.iterativeParams)) {
      delta = PCGSolver(*pcg).optimize(gfg);
    } else if (auto spcg = boost::dynamic_pointer_cast<SubgraphSolverParameters>(
                   params.iterativeParams)) {
      if (!params.ordering)
        throw std::runtime_error("SubgraphSolver needs an ordering");
      delta = SubgraphSolver(gfg, *spcg, *params.ordering).optimize();
    } else {
      throw std::runtime_error(
          "NonlinearOptimizer::solve: special cg parameter type is not handled in LM solver ...");
    }
  } else {
    throw std::runtime_error("NonlinearOptimizer::solve: Optimization parameter is invalid");
  }

  // return update
  return delta;
}

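// Usage sketch (illustrative): the branch taken in solve() is selected by
// NonlinearOptimizerParams::linearSolverType. For instance, a caller wanting the
// iterative (PCG) branch might configure it through the Levenberg-Marquardt front
// end (`graph` and `initialEstimate` are hypothetical names):
//
//   LevenbergMarquardtParams lmParams;
//   lmParams.linearSolverType = NonlinearOptimizerParams::Iterative;
//   lmParams.iterativeParams = boost::make_shared<PCGSolverParameters>();
//   LevenbergMarquardtOptimizer optimizer(graph, initialEstimate, lmParams);
//   Values result = optimizer.optimize();
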
/* ************************************************************************* */
bool checkConvergence(double relativeErrorTreshold, double absoluteErrorTreshold,
                      double errorThreshold, double currentError, double newError,
                      NonlinearOptimizerParams::Verbosity verbosity) {
  if (verbosity >= NonlinearOptimizerParams::ERROR) {
    if (newError <= errorThreshold)
      cout << "errorThreshold: " << newError << " < " << errorThreshold << endl;
    else
      cout << "errorThreshold: " << newError << " > " << errorThreshold << endl;
  }

  if (newError <= errorThreshold)
    return true;

  // check if diverges
  double absoluteDecrease = currentError - newError;
  if (verbosity >= NonlinearOptimizerParams::ERROR) {
    if (absoluteDecrease <= absoluteErrorTreshold)
      cout << "absoluteDecrease: " << setprecision(12) << absoluteDecrease << " < "
           << absoluteErrorTreshold << endl;
    else
      cout << "absoluteDecrease: " << setprecision(12) << absoluteDecrease
           << " >= " << absoluteErrorTreshold << endl;
  }

  // calculate relative error decrease and update currentError
  double relativeDecrease = absoluteDecrease / currentError;
  if (verbosity >= NonlinearOptimizerParams::ERROR) {
    if (relativeDecrease <= relativeErrorTreshold)
      cout << "relativeDecrease: " << setprecision(12) << relativeDecrease << " < "
           << relativeErrorTreshold << endl;
    else
      cout << "relativeDecrease: " << setprecision(12) << relativeDecrease
           << " >= " << relativeErrorTreshold << endl;
  }
  bool converged = (relativeErrorTreshold && (relativeDecrease <= relativeErrorTreshold)) ||
                   (absoluteDecrease <= absoluteErrorTreshold);
  if (verbosity >= NonlinearOptimizerParams::TERMINATION && converged) {
    if (absoluteDecrease >= 0.0)
      cout << "converged" << endl;
    else
      cout << "Warning: stopping nonlinear iterations because error increased" << endl;

    cout << "errorThreshold: " << newError << " <? " << errorThreshold << endl;
    cout << "absoluteDecrease: " << setprecision(12) << absoluteDecrease << " <? "
         << absoluteErrorTreshold << endl;
    cout << "relativeDecrease: " << setprecision(12) << relativeDecrease << " <? "
         << relativeErrorTreshold << endl;
  }
  return converged;
}

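// Worked example (illustrative): with the default tolerances relativeErrorTol =
// absoluteErrorTol = 1e-5 and errorTol = 0.0, an iteration that reduces the error
// from 10.0 to 9.9999 gives absoluteDecrease = 1e-4 (> 1e-5, not yet converged on
// the absolute test) but relativeDecrease = 1e-4 / 10.0 = 1e-5 (<= 1e-5), so the
// relative-decrease criterion reports convergence:
//
//   checkConvergence(1e-5, 1e-5, 0.0, 10.0, 9.9999,
//                    NonlinearOptimizerParams::SILENT);  // returns true
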
/* ************************************************************************* */
GTSAM_EXPORT bool checkConvergence(const NonlinearOptimizerParams& params, double currentError,
                                   double newError) {
  return checkConvergence(params.relativeErrorTol, params.absoluteErrorTol, params.errorTol,
                          currentError, newError, params.verbosity);
}

}  // namespace gtsam