NonlinearConjugateGradientOptimizer.h
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file   NonlinearConjugateGradientOptimizer.h
 * @brief  Simple non-linear optimizer that solves using *non-preconditioned* CG
 * @author Yong-Dian Jian
 * @date   June 11, 2012
 */
#pragma once

#include <gtsam/base/Manifold.h>
#include <gtsam/nonlinear/NonlinearOptimizer.h>

#include <stdexcept>

namespace gtsam {

/// Fletcher-Reeves formula for computing β, the direction of steepest descent.
template <typename Gradient>
double FletcherReeves(const Gradient &currentGradient,
                      const Gradient &prevGradient) {
  // Fletcher-Reeves: beta = g_n'*g_n / (g_{n-1}'*g_{n-1})
  const double beta =
      currentGradient.dot(currentGradient) / prevGradient.dot(prevGradient);
  return beta;
}

/// Polak-Ribiere formula for computing β, the direction of steepest descent.
template <typename Gradient>
double PolakRibiere(const Gradient &currentGradient,
                    const Gradient &prevGradient) {
  // Polak-Ribiere: beta = g_n'*(g_n - g_{n-1}) / (g_{n-1}'*g_{n-1}),
  // clamped at zero to guarantee a descent direction.
  const double beta =
      std::max(0.0, currentGradient.dot(currentGradient - prevGradient) /
                        prevGradient.dot(prevGradient));
  return beta;
}

/// Hestenes-Stiefel formula for computing β, the direction of steepest
/// descent.
template <typename Gradient>
double HestenesStiefel(const Gradient &currentGradient,
                       const Gradient &prevGradient,
                       const Gradient &direction) {
  // Hestenes-Stiefel: beta = g_n'*(g_n - g_{n-1}) / (-s_{n-1}'*(g_n - g_{n-1}))
  Gradient d = currentGradient - prevGradient;
  const double beta = std::max(0.0, currentGradient.dot(d) / -direction.dot(d));
  return beta;
}

/// Dai-Yuan formula for computing β, the direction of steepest descent.
template <typename Gradient>
double DaiYuan(const Gradient &currentGradient, const Gradient &prevGradient,
               const Gradient &direction) {
  // Dai-Yuan: beta = g_n'*g_n / (-s_{n-1}'*(g_n - g_{n-1}))
  const double beta =
      std::max(0.0, currentGradient.dot(currentGradient) /
                        -direction.dot(currentGradient - prevGradient));
  return beta;
}
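
// Illustrative sketch (not part of the GTSAM API): each β rule above is
// combined with the previous search direction exactly as in
// nonlinearConjugateGradient() below. Assuming VectorValues gradients
// `g` and `gPrev` and a previous direction `d`:
//
//   double beta = PolakRibiere(g, gPrev);  // or any other rule above
//   VectorValues dNew = g + (beta * d);    // next conjugate direction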

enum class DirectionMethod {
  FletcherReeves,
  PolakRibiere,
  HestenesStiefel,
  DaiYuan
};

/** An implementation of the nonlinear CG method using the template below */
class GTSAM_EXPORT NonlinearConjugateGradientOptimizer
    : public NonlinearOptimizer {
  /* a class for the nonlinearConjugateGradient template */
  class System {
   public:
    typedef Values State;
    typedef VectorValues Gradient;
    typedef NonlinearOptimizerParams Parameters;

   protected:
    const NonlinearFactorGraph &graph_;

   public:
    System(const NonlinearFactorGraph &graph) : graph_(graph) {}
    double error(const State &state) const;
    Gradient gradient(const State &state) const;
    State advance(const State &current, const double alpha,
                  const Gradient &g) const;
  };

 public:
  typedef NonlinearOptimizer Base;
  typedef NonlinearOptimizerParams Parameters;
  typedef std::shared_ptr<NonlinearConjugateGradientOptimizer> shared_ptr;

 protected:
  Parameters params_;

  const NonlinearOptimizerParams &_params() const override { return params_; }

 public:
  /// Constructor
  NonlinearConjugateGradientOptimizer(
      const NonlinearFactorGraph &graph, const Values &initialValues,
      const Parameters &params = Parameters(),
      const DirectionMethod &directionMethod = DirectionMethod::PolakRibiere);

  /// Destructor
  ~NonlinearConjugateGradientOptimizer() override {}

  /**
   * Perform a single iteration, returning the GaussianFactorGraph
   * corresponding to the linearized factor graph.
   */
  GaussianFactorGraph::shared_ptr iterate() override;

  /**
   * Optimize for the maximum-likelihood estimate, returning the optimized
   * Values structure.
   */
  const Values &optimize() override;
};
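
// A minimal usage sketch (assumes `graph` and `initial` are an
// already-constructed NonlinearFactorGraph and Values):
//
//   NonlinearConjugateGradientOptimizer optimizer(graph, initial);
//   Values result = optimizer.optimize();
//   std::cout << "final error: " << graph.error(result) << std::endl;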

/** Implements the golden-section line search algorithm. */
template <class S, class V, class W>
double lineSearch(const S &system, const V currentValues, const W &gradient) {
  /* scale the step range by the gradient magnitude, so the largest trial
     step moves one unit along the gradient direction */
  const double g = gradient.norm();

  // Perform the golden-section search to decide the optimal step size;
  // for details refer to http://en.wikipedia.org/wiki/Golden_section_search
  const double phi = 0.5 * (1.0 + std::sqrt(5.0)), resphi = 2.0 - phi,
               tau = 1e-5;
  double minStep = -1.0 / g, maxStep = 0,
         newStep = minStep + (maxStep - minStep) / (phi + 1.0);

  V newValues = system.advance(currentValues, newStep, gradient);
  double newError = system.error(newValues);

  while (true) {
    const bool flag = (maxStep - newStep > newStep - minStep);
    const double testStep = flag ? newStep + resphi * (maxStep - newStep)
                                 : newStep - resphi * (newStep - minStep);

    if ((maxStep - minStep) < tau * (std::abs(testStep) + std::abs(newStep))) {
      return 0.5 * (minStep + maxStep);
    }

    const V testValues = system.advance(currentValues, testStep, gradient);
    const double testError = system.error(testValues);

    // update the working range
    if (testError >= newError) {
      if (flag)
        maxStep = testStep;
      else
        minStep = testStep;
    } else {
      if (flag) {
        minStep = newStep;
        newStep = testStep;
        newError = testError;
      } else {
        maxStep = newStep;
        newStep = testStep;
        newError = testError;
      }
    }
  }
  return 0.0;  // unreachable: the loop only exits via the return above
}
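
// Illustrative sketch (not part of GTSAM): lineSearch only requires that
// `system` expose error() and advance(), and that the gradient type provide
// norm(). A hypothetical Eigen-based system satisfying that concept:
//
//   struct Quadratic {
//     Eigen::VectorXd target;
//     double error(const Eigen::VectorXd &x) const {
//       return (x - target).squaredNorm();  // objective to minimize
//     }
//     Eigen::VectorXd advance(const Eigen::VectorXd &x, double alpha,
//                             const Eigen::VectorXd &g) const {
//       return x + alpha * g;  // lineSearch probes alpha in [-1/|g|, 0]
//     }
//   };
//
//   double alpha = lineSearch(Quadratic{target}, x0, gradientAtX0);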

/**
 * Implement the nonlinear conjugate gradient method using the Polak-Ribiere
 * formula suggested in
 * http://en.wikipedia.org/wiki/Nonlinear_conjugate_gradient_method.
 *
 * The S (system) class requires three member functions: error(state),
 * gradient(state) and advance(state, step-size, direction). The V class
 * denotes the state or the solution.
 *
 * The last parameter is a switch between gradient descent and conjugate
 * gradient.
 */
template <class S, class V>
std::tuple<V, int> nonlinearConjugateGradient(
    const S &system, const V &initial, const NonlinearOptimizerParams &params,
    const bool singleIteration,
    const DirectionMethod &directionMethod = DirectionMethod::PolakRibiere,
    const bool gradientDescent = false) {
  // GTSAM_CONCEPT_MANIFOLD_TYPE(V)

  size_t iteration = 0;

  // check if we're already close enough
  double currentError = system.error(initial);
  if (currentError <= params.errorTol) {
    if (params.verbosity >= NonlinearOptimizerParams::ERROR) {
      std::cout << "Exiting, as error = " << currentError << " < "
                << params.errorTol << std::endl;
    }
    return {initial, iteration};
  }

  V currentValues = initial;
  typename S::Gradient currentGradient = system.gradient(currentValues),
                       prevGradient, direction = currentGradient;

  /* do one step of gradient descent */
  V prevValues = currentValues;
  double prevError = currentError;
  double alpha = lineSearch(system, currentValues, direction);
  currentValues = system.advance(prevValues, alpha, direction);
  currentError = system.error(currentValues);

  // Maybe show output
  if (params.verbosity >= NonlinearOptimizerParams::ERROR)
    std::cout << "Initial error: " << currentError << std::endl;

  // Iterative loop
  do {
    if (gradientDescent) {
      direction = system.gradient(currentValues);
    } else {
      prevGradient = currentGradient;
      currentGradient = system.gradient(currentValues);

      double beta;
      switch (directionMethod) {
        case DirectionMethod::FletcherReeves:
          beta = FletcherReeves(currentGradient, prevGradient);
          break;
        case DirectionMethod::PolakRibiere:
          beta = PolakRibiere(currentGradient, prevGradient);
          break;
        case DirectionMethod::HestenesStiefel:
          beta = HestenesStiefel(currentGradient, prevGradient, direction);
          break;
        case DirectionMethod::DaiYuan:
          beta = DaiYuan(currentGradient, prevGradient, direction);
          break;
        default:
          throw std::runtime_error(
              "NonlinearConjugateGradientOptimizer: Invalid directionMethod");
      }

      direction = currentGradient + (beta * direction);
    }

    alpha = lineSearch(system, currentValues, direction);

    prevValues = currentValues;
    prevError = currentError;

    currentValues = system.advance(prevValues, alpha, direction);
    currentError = system.error(currentValues);

    // User hook:
    if (params.iterationHook)
      params.iterationHook(iteration, prevError, currentError);

    // Maybe show output
    if (params.verbosity >= NonlinearOptimizerParams::ERROR)
      std::cout << "iteration: " << iteration
                << ", currentError: " << currentError << std::endl;
  } while (++iteration < params.maxIterations && !singleIteration &&
           !checkConvergence(params.relativeErrorTol, params.absoluteErrorTol,
                             params.errorTol, prevError, currentError,
                             params.verbosity));

  // Printing if verbose
  if (params.verbosity >= NonlinearOptimizerParams::ERROR &&
      iteration >= params.maxIterations)
    std::cout << "nonlinearConjugateGradient: Terminating because reached "
                 "maximum iterations"
              << std::endl;

  return {currentValues, iteration};
}
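
// Sketch (illustrative): the free function can also be driven with any type
// modeling the S concept (in addition to error()/advance(), it needs a nested
// Gradient typedef and a gradient(state) member; names below are hypothetical):
//
//   NonlinearOptimizerParams params;
//   params.maxIterations = 100;
//   auto [solution, iterations] = nonlinearConjugateGradient(
//       mySystem, initialState, params, false,  // false: run to convergence
//       DirectionMethod::FletcherReeves);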

}  // namespace gtsam