"""
Optimization with logging via a hook.
Author: Jing Wu and Frank Dellaert
"""

import gtsam
from gtsam import NonlinearOptimizer, NonlinearOptimizerParams
def optimize(optimizer, check_convergence, hook):
    """ Given an optimizer and a convergence check, iterate until convergence.
        After each iteration, hook(optimizer, error) is called.
        After the function, use values and errors to get the result.
        Arguments:
            optimizer (T): needs an iterate and an error function.
            check_convergence: T * float * float -> bool
            hook -- hook function to record the error
    """
    # The optimizer is created with default values which incur the error below;
    # record that initial error before any iteration runs.
    current_error = optimizer.error()
    hook(optimizer, current_error)

    # Iterative loop: step the optimizer, record the new error, and stop as
    # soon as the caller-supplied convergence check is satisfied.
    while True:
        # Do next iteration
        optimizer.iterate()
        new_error = optimizer.error()
        hook(optimizer, new_error)
        if check_convergence(optimizer, current_error, new_error):
            return
        current_error = new_error
def gtsam_optimize(optimizer,
                   params,
                   hook):
    """ Given an optimizer and params, iterate until convergence.
        After each iteration, hook(optimizer) is called.
        After the function, use values and errors to get the result.
        Arguments:
            optimizer {NonlinearOptimizer} -- Nonlinear optimizer
            params {NonlinearOptimizerParams} -- Nonlinear optimizer parameters
            hook -- hook function to record the error
    """
    def check_convergence(optimizer, current_error, new_error):
        # Converged when the iteration cap is hit, or when the error decrease
        # meets the relative/absolute/total tolerances configured in `params`.
        return (optimizer.iterations() >= params.getMaxIterations()) or (
            gtsam.checkConvergence(params.getRelativeErrorTol(),
                                   params.getAbsoluteErrorTol(),
                                   params.getErrorTol(),
                                   current_error, new_error))
    optimize(optimizer, check_convergence, hook)
    return optimizer.values()