23 #include <boost/tuple/tuple.hpp> 44 double error(
const State &state)
const;
45 Gradient gradient(
const State &state)
const;
46 State advance(
const State ¤t,
const double alpha,
47 const Gradient &
g)
const;
54 typedef boost::shared_ptr<NonlinearConjugateGradientOptimizer>
shared_ptr;
67 const Values& initialValues,
const Parameters&
params = Parameters());
/**
 * Golden-section line search along `gradient`, starting from `currentValues`.
 *
 * Searches the step size in the bracket [-1/|gradient|, 0] (i.e. a step of at
 * most unit length against the gradient) and returns the step that minimizes
 * `system.error(system.advance(currentValues, step, gradient))`.
 * See http://en.wikipedia.org/wiki/Golden_section_search
 *
 * @param system        provides error(V) and advance(V, double, W)
 * @param currentValues starting point of the search (passed by value,
 *                      matching the original declaration)
 * @param gradient      search direction; must expose norm()
 * @return the located step size (negative: a step against the gradient)
 */
template<class S, class V, class W>
double lineSearch(const S &system, const V currentValues, const W &gradient) {

  /* normalize it such that it becomes a unit vector */
  const double g = gradient.norm();

  // Golden-section constants. NOTE(review): the extraction lost the value of
  // `tau`; 1e-5 matches the upstream implementation — confirm.
  const double phi = 0.5 * (1.0 + std::sqrt(5.0)), resphi = 2.0 - phi,
      tau = 1e-5;
  double minStep = -1.0 / g, maxStep = 0,
      newStep = minStep + (maxStep - minStep) / (phi + 1.0);

  V newValues = system.advance(currentValues, newStep, gradient);
  double newError = system.error(newValues);

  while (true) {
    // Probe inside the larger of the two sub-intervals around newStep.
    const bool flag = (maxStep - newStep > newStep - minStep) ? true : false;
    const double testStep = flag ?
        newStep + resphi * (maxStep - newStep) :
        newStep - resphi * (newStep - minStep);

    // Converged: bracket width is small relative to the step magnitudes.
    if ((maxStep - minStep) < tau * (std::abs(testStep) + std::abs(newStep))) {
      return 0.5 * (minStep + maxStep);
    }

    const V testValues = system.advance(currentValues, testStep, gradient);
    const double testError = system.error(testValues);

    // Update the working range, keeping the bracket around the minimum.
    if (testError >= newError) {
      if (flag)
        maxStep = testStep;
      else
        minStep = testStep;
    } else {
      if (flag)
        minStep = newStep;
      else
        maxStep = newStep;
      newStep = testStep;
      newError = testError;
    }
  }
}
147 template<
class S,
class V>
150 const bool singleIteration,
const bool gradientDescent =
false) {
154 size_t iteration = 0;
157 double currentError = system.error(initial);
158 if (currentError <= params.
errorTol) {
160 std::cout <<
"Exiting, as error = " << currentError <<
" < " 163 return boost::tie(initial, iteration);
167 typename S::Gradient currentGradient = system.gradient(currentValues),
168 prevGradient, direction = currentGradient;
171 V prevValues = currentValues;
172 double prevError = currentError;
174 currentValues = system.advance(prevValues, alpha, direction);
175 currentError = system.error(currentValues);
179 std::cout <<
"Initial error: " << currentError << std::endl;
183 if (gradientDescent ==
true) {
184 direction = system.gradient(currentValues);
186 prevGradient = currentGradient;
187 currentGradient = system.gradient(currentValues);
190 currentGradient.dot(currentGradient - prevGradient)
191 / prevGradient.dot(prevGradient));
192 direction = currentGradient + (beta * direction);
195 alpha =
lineSearch(system, currentValues, direction);
197 prevValues = currentValues;
198 prevError = currentError;
200 currentValues = system.advance(prevValues, alpha, direction);
201 currentError = system.error(currentValues);
209 std::cout <<
"iteration: " << iteration <<
", currentError: " << currentError << std::endl;
210 }
while (++iteration < params.
maxIterations && !singleIteration
218 <<
"nonlinearConjugateGradient: Terminating because reached maximum iterations" 221 return boost::tie(currentValues, iteration);
const NonlinearFactorGraph & graph_
IterationHook iterationHook
NonlinearOptimizerParams Parameters
boost::shared_ptr< This > shared_ptr
shared_ptr to this class
boost::tuple< V, int > nonlinearConjugateGradient(const S &system, const V &initial, const NonlinearOptimizerParams &params, const bool singleIteration, const bool gradientDescent=false)
~NonlinearConjugateGradientOptimizer() override
Destructor.
EIGEN_DEVICE_FUNC const SqrtReturnType sqrt() const
Verbosity verbosity
The printing verbosity during optimization (default SILENT)
NonlinearOptimizerParams Parameters
NonlinearFactorGraph graph
double absoluteErrorTol
The maximum absolute error decrease to stop iterating (default 1e-5)
void g(const string &key, int i)
boost::shared_ptr< NonlinearConjugateGradientOptimizer > shared_ptr
double relativeErrorTol
The maximum relative error decrease to stop iterating (default 1e-5)
double errorTol
The maximum total error to stop iterating (default 0.0)
bool checkConvergence(double relativeErrorTreshold, double absoluteErrorTreshold, double errorThreshold, double currentError, double newError, NonlinearOptimizerParams::Verbosity verbosity)
Point3 optimize(const NonlinearFactorGraph &graph, const Values &values, Key landmarkKey)
Base class and basic functions for Manifold types.
Array< double, 1, 3 > e(1./3., 0.5, 2.)
static SmartStereoProjectionParams params
System(const NonlinearFactorGraph &graph)
double lineSearch(const S &system, const V currentValues, const W &gradient)
Base class and parameters for nonlinear optimization algorithms.
size_t maxIterations
The maximum iterations to stop iterating (default 100)
const NonlinearOptimizerParams & _params() const override