// NOTE(review): tail of a member-function declaration whose first line is not
// visible in this chunk — appears to be the trailing parameter and const
// qualifier of an `advance(..., const Gradient& g) const;` declaration.
// TODO: confirm against the full header; leading "46" is extraction residue.
46 const Gradient &
g)
const;
53 typedef std::shared_ptr<NonlinearConjugateGradientOptimizer>
shared_ptr;
// NOTE(review): continuation of a constructor (or factory) declaration whose
// first line is missing from this chunk; shows the initial values parameter
// and an optimizer-parameters argument defaulting to Parameters().
// TODO: confirm the full signature; leading "66" is extraction residue.
66 const Values& initialValues,
const Parameters&
params = Parameters());
/**
 * Golden-section line search for the step size that minimizes the error of
 * `system` along the direction `gradient`, starting from `currentValues`.
 * See http://en.wikipedia.org/wiki/Golden_section_search for the algorithm.
 *
 * @param system        provides error(V) -> double and
 *                      advance(V, double, W) -> V
 * @param currentValues starting point of the search (passed by const
 *                      reference; copying the state is unnecessary here)
 * @param gradient      search direction; must provide norm()
 * @return the step size alpha (in [-1/|gradient|, 0]) that approximately
 *         minimizes system.error(system.advance(currentValues, alpha, gradient))
 */
template<class S, class V, class W>
double lineSearch(const S &system, const V &currentValues, const W &gradient) {

  // Scale the initial bracket by the gradient magnitude so the search
  // interval [-1/|g|, 0] is invariant to the gradient's scale.
  const double g = gradient.norm();

  // Golden-section constants: phi is the golden ratio, resphi = 2 - phi is
  // the probe fraction, tau is the relative interval width at which we stop.
  const double phi = 0.5 * (1.0 + std::sqrt(5.0));
  const double resphi = 2.0 - phi;
  const double tau = 1e-5;

  // Bracket [minStep, maxStep] with an interior probe point newStep.
  double minStep = -1.0 / g;
  double maxStep = 0;
  double newStep = minStep + (maxStep - minStep) / (phi + 1.0);

  V newValues = system.advance(currentValues, newStep, gradient);
  double newError = system.error(newValues);

  while (true) {
    // Probe inside the larger of the two sub-intervals around newStep.
    const bool flag = (maxStep - newStep > newStep - minStep);
    const double testStep = flag ? newStep + resphi * (maxStep - newStep)
                                 : newStep - resphi * (newStep - minStep);

    // Converged: bracket is tiny relative to the current step magnitudes.
    if ((maxStep - minStep) < tau * (std::abs(testStep) + std::abs(newStep))) {
      return 0.5 * (minStep + maxStep);
    }

    const V testValues = system.advance(currentValues, testStep, gradient);
    const double testError = system.error(testValues);

    // Shrink the bracket, keeping the sub-interval containing the minimum.
    if (testError >= newError) {
      if (flag)
        maxStep = testStep;
      else
        minStep = testStep;
    } else {
      if (flag)
        minStep = newStep;
      else
        maxStep = newStep;
      newStep = testStep;
      newError = testError;
    }
  }
}
146 template<
class S,
class V>
149 const bool singleIteration,
const bool gradientDescent =
false) {
153 size_t iteration = 0;
156 double currentError = system.error(
initial);
157 if (currentError <=
params.errorTol) {
159 std::cout <<
"Exiting, as error = " << currentError <<
" < "
160 <<
params.errorTol << std::endl;
166 typename S::Gradient currentGradient = system.gradient(currentValues),
167 prevGradient, direction = currentGradient;
170 V prevValues = currentValues;
171 double prevError = currentError;
173 currentValues = system.advance(prevValues,
alpha, direction);
174 currentError = system.error(currentValues);
178 std::cout <<
"Initial error: " << currentError << std::endl;
182 if (gradientDescent ==
true) {
183 direction = system.gradient(currentValues);
185 prevGradient = currentGradient;
186 currentGradient = system.gradient(currentValues);
189 currentGradient.dot(currentGradient - prevGradient)
190 / prevGradient.dot(prevGradient));
191 direction = currentGradient + (
beta * direction);
196 prevValues = currentValues;
197 prevError = currentError;
199 currentValues = system.advance(prevValues,
alpha, direction);
200 currentError = system.error(currentValues);
204 params.iterationHook(iteration, prevError, currentError);
208 std::cout <<
"iteration: " << iteration <<
", currentError: " << currentError << std::endl;
209 }
while (++iteration <
params.maxIterations && !singleIteration
211 params.errorTol, prevError, currentError,
params.verbosity));
215 && iteration >=
params.maxIterations)
217 <<
"nonlinearConjugateGradient: Terminating because reached maximum iterations"
220 return {currentValues, iteration};