NonlinearConjugateGradientOptimizer.h
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */
#pragma once

#include <gtsam/base/Manifold.h>
#include <gtsam/nonlinear/NonlinearOptimizer.h>

#include <cmath>     // std::sqrt, std::abs
#include <iostream>  // std::cout
#include <tuple>     // std::tuple
namespace gtsam {

/** An implementation of the nonlinear conjugate gradient method using the template below */
class GTSAM_EXPORT NonlinearConjugateGradientOptimizer : public NonlinearOptimizer {

  /* a class for the nonlinearConjugateGradient template */
  class System {
  public:
    typedef Values State;
    typedef VectorValues Gradient;
    typedef NonlinearOptimizerParams Parameters;

  protected:
    const NonlinearFactorGraph &graph_;

  public:
    System(const NonlinearFactorGraph &graph) :
        graph_(graph) {
    }
    double error(const State &state) const;
    Gradient gradient(const State &state) const;
    State advance(const State &current, const double alpha,
        const Gradient &g) const;
  };
public:

  typedef NonlinearOptimizer Base;
  typedef NonlinearOptimizerParams Parameters;
  typedef std::shared_ptr<NonlinearConjugateGradientOptimizer> shared_ptr;

protected:
  Parameters params_;

  const NonlinearOptimizerParams& _params() const override {
    return params_;
  }

public:

  /// Constructor
  NonlinearConjugateGradientOptimizer(const NonlinearFactorGraph& graph,
      const Values& initialValues, const Parameters& params = Parameters());

  /// Destructor
  ~NonlinearConjugateGradientOptimizer() override {
  }

  /**
   * Perform a single iteration, returning a GaussianFactorGraph corresponding
   * to the linearized factor graph.
   */
  GaussianFactorGraph::shared_ptr iterate() override;

  /**
   * Optimize for the maximum-likelihood estimate, returning the final,
   * optimized Values.
   */
  const Values& optimize() override;
};
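/* A hypothetical usage sketch (not part of the original header), assuming a
 * NonlinearFactorGraph `graph` and initial Values `initial` built elsewhere:
 *
 *   NonlinearOptimizerParams params;
 *   params.maxIterations = 100;
 *   NonlinearConjugateGradientOptimizer optimizer(graph, initial, params);
 *   Values result = optimizer.optimize();  // run CG until convergence
 */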

/** Implement the golden-section line search algorithm */
template<class S, class V, class W>
double lineSearch(const S &system, const V currentValues, const W &gradient) {

  /* normalize it such that it becomes a unit vector */
  const double g = gradient.norm();

  // perform the golden-section search algorithm to decide the optimal step size
  // for details, refer to http://en.wikipedia.org/wiki/Golden_section_search
  const double phi = 0.5 * (1.0 + std::sqrt(5.0)), resphi = 2.0 - phi, tau =
      1e-5;
  double minStep = -1.0 / g, maxStep = 0, newStep = minStep
      + (maxStep - minStep) / (phi + 1.0);

  V newValues = system.advance(currentValues, newStep, gradient);
  double newError = system.error(newValues);

  while (true) {
    const bool flag = (maxStep - newStep > newStep - minStep);
    const double testStep =
        flag ? newStep + resphi * (maxStep - newStep) :
            newStep - resphi * (newStep - minStep);

    if ((maxStep - minStep)
        < tau * (std::abs(testStep) + std::abs(newStep))) {
      return 0.5 * (minStep + maxStep);
    }

    const V testValues = system.advance(currentValues, testStep, gradient);
    const double testError = system.error(testValues);

    // update the working range
    if (testError >= newError) {
      if (flag)
        maxStep = testStep;
      else
        minStep = testStep;
    } else {
      if (flag) {
        minStep = newStep;
        newStep = testStep;
        newError = testError;
      } else {
        maxStep = newStep;
        newStep = testStep;
        newError = testError;
      }
    }
  }
  return 0.0;  // unreachable: the loop above always returns once the bracket shrinks
}
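/* A hypothetical sketch of calling lineSearch directly. The function only
 * requires a "system" type with error(state) and advance(state, alpha,
 * direction) members, so the System adapter above (or any type mirroring its
 * interface) works; `sys`, `values`, and `grad` are assumed to exist:
 *
 *   double alpha = lineSearch(sys, values, grad);  // optimal step size
 *   auto next = sys.advance(values, alpha, grad);  // take the step
 */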

/**
 * Implement the nonlinear conjugate gradient method using the Polak-Ribiere formula suggested in
 * http://en.wikipedia.org/wiki/Nonlinear_conjugate_gradient_method.
 *
 * The S (system) class requires three member functions: error(state), gradient(state) and
 * advance(state, step-size, direction). The V class denotes the state or the solution.
 *
 * The last parameter is a switch between gradient-descent and conjugate gradient.
 */
template<class S, class V>
std::tuple<V, int> nonlinearConjugateGradient(const S &system,
    const V &initial, const NonlinearOptimizerParams &params,
    const bool singleIteration, const bool gradientDescent = false) {

  // GTSAM_CONCEPT_MANIFOLD_TYPE(V)

  size_t iteration = 0;

  // check if we're already close enough
  double currentError = system.error(initial);
  if (currentError <= params.errorTol) {
    if (params.verbosity >= NonlinearOptimizerParams::ERROR) {
      std::cout << "Exiting, as error = " << currentError << " < "
          << params.errorTol << std::endl;
    }
    return {initial, iteration};
  }

  V currentValues = initial;
  typename S::Gradient currentGradient = system.gradient(currentValues),
      prevGradient, direction = currentGradient;

  /* do one step of gradient descent */
  V prevValues = currentValues;
  double prevError = currentError;
  double alpha = lineSearch(system, currentValues, direction);
  currentValues = system.advance(prevValues, alpha, direction);
  currentError = system.error(currentValues);

  // Maybe show output
  if (params.verbosity >= NonlinearOptimizerParams::ERROR)
    std::cout << "Initial error: " << currentError << std::endl;

  // Iterative loop
  do {
    if (gradientDescent) {
      direction = system.gradient(currentValues);
    } else {
      prevGradient = currentGradient;
      currentGradient = system.gradient(currentValues);
      // Polak-Ribiere: beta = g_n' * (g_n - g_{n-1}) / (g_{n-1}' * g_{n-1})
      const double beta = std::max(0.0,
          currentGradient.dot(currentGradient - prevGradient)
              / prevGradient.dot(prevGradient));
      direction = currentGradient + (beta * direction);
    }

    alpha = lineSearch(system, currentValues, direction);

    prevValues = currentValues;
    prevError = currentError;

    currentValues = system.advance(prevValues, alpha, direction);
    currentError = system.error(currentValues);

    // User hook:
    if (params.iterationHook)
      params.iterationHook(iteration, prevError, currentError);

    // Maybe show output
    if (params.verbosity >= NonlinearOptimizerParams::ERROR)
      std::cout << "iteration: " << iteration << ", currentError: "
          << currentError << std::endl;
  } while (++iteration < params.maxIterations && !singleIteration
      && !checkConvergence(params.relativeErrorTol, params.absoluteErrorTol,
          params.errorTol, prevError, currentError, params.verbosity));

  // Printing if verbose
  if (params.verbosity >= NonlinearOptimizerParams::ERROR
      && iteration >= params.maxIterations)
    std::cout
        << "nonlinearConjugateGradient: Terminating because reached maximum iterations"
        << std::endl;

  return {currentValues, iteration};
}
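/* A hypothetical sketch of driving the template directly. Any type exposing
 * error/gradient/advance and a Gradient typedef (like System above) can be
 * plugged in; `sys` and `initial` are assumed to exist:
 *
 *   NonlinearOptimizerParams params;
 *   params.maxIterations = 100;
 *   auto [result, iterations] =
 *       nonlinearConjugateGradient(sys, initial, params, false);
 */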

} // namespace gtsam