"""
Optimization with logging via a hook.
Author: Jing Wu and Frank Dellaert
"""
from typing import Any, Callable

import gtsam
from gtsam import NonlinearOptimizer, NonlinearOptimizerParams
# Maps each gtsam optimizer class to its matching params class, so that
# optimize_using() can construct default params when the caller supplies none.
# NOTE(review): the dict body was truncated in this copy; entries reconstructed
# from the standard gtsam optimizer/params pairs -- confirm against upstream.
OPTIMIZER_PARAMS_MAP = {
    gtsam.GaussNewtonOptimizer: gtsam.GaussNewtonParams,
    gtsam.LevenbergMarquardtOptimizer: gtsam.LevenbergMarquardtParams,
    gtsam.DoglegOptimizer: gtsam.DoglegParams,
}
def optimize_using(OptimizerClass, hook, *args) -> Any:
    """ Wraps the constructor and "optimize()" call for an Optimizer together and adds an iteration
        hook.
        Example usage:
            ```python
            def hook(optimizer, error):
                print("iteration {:}, error = {:}".format(optimizer.iterations(), error))
            solution = optimize_using(gtsam.GaussNewtonOptimizer, hook, graph, init, params)
            ```
        Iteration hook's args are (optimizer, error) and return type should be None

        Args:
            OptimizerClass (T): A NonlinearOptimizer class (e.g. GaussNewtonOptimizer,
                LevenbergMarquardtOptimizer)
            hook ([T, double] -> None): Function to callback after each iteration. Args are (optimizer,
                error) and return should be None.
            *args: Arguments that would be passed into the OptimizerClass constructor, usually:
                graph, init, [params]
        Returns:
            (gtsam.Values): A Values object representing the optimization solution.
    """
    # Attach the iteration hook to the caller-supplied params if present;
    # otherwise create default params for this optimizer class and append them.
    for arg in args:
        if isinstance(arg, gtsam.NonlinearOptimizerParams):
            # `optimizer` is assigned below; the lambda deliberately captures
            # it late so the hook sees the constructed optimizer at call time.
            arg.iterationHook = lambda iteration, error_before, error_after: hook(
                optimizer, error_after)
            break
    else:
        params = OPTIMIZER_PARAMS_MAP[OptimizerClass]()
        params.iterationHook = lambda iteration, error_before, error_after: hook(
            optimizer, error_after)
        args = (*args, params)
    # Construct the optimizer with the (possibly extended) argument list.
    optimizer = OptimizerClass(*args)
    # Report the initial error before any iteration runs.
    hook(optimizer, optimizer.error())
    # Run the optimization to convergence and return the solution Values.
    return optimizer.optimize()
def optimize(optimizer, check_convergence, hook):
    """ Given an optimizer and a convergence check, iterate until convergence.
        After each iteration, hook(optimizer, error) is called.
        After the function, use values and errors to get the result.
        Arguments:
            optimizer (T): needs an iterate and an error function.
            check_convergence: T * float * float -> bool
            hook -- hook function to record the error
    """
    # The optimizer starts from its initial values, which incur the error below.
    current_error = optimizer.error()
    hook(optimizer, current_error)

    # Iterative loop; we drive iterate() manually so the hook can observe the
    # error after every single step.
    while True:
        # Do the next iteration and report the new error to the hook.
        optimizer.iterate()
        new_error = optimizer.error()
        hook(optimizer, new_error)
        if check_convergence(optimizer, current_error, new_error):
            return
        current_error = new_error
def gtsam_optimize(optimizer,
                   params,
                   hook):
    """ Given an optimizer and params, iterate until convergence.
        Recommend using optimize_using instead.
        After each iteration, hook(optimizer) is called.
        After the function, use values and errors to get the result.
        Arguments:
            optimizer {NonlinearOptimizer} -- Nonlinear optimizer
            params {NonlinearOptimizerParams} -- Nonlinear optimizer parameters
            hook -- hook function to record the error
    """
    def check_convergence(optimizer, current_error, new_error):
        # Converged when the iteration cap is reached or gtsam's standard
        # tolerance test (relative/absolute/total error) passes.
        # NOTE(review): the call name was lost in this copy; reconstructed as
        # gtsam.checkConvergence -- confirm against upstream.
        return (optimizer.iterations() >= params.getMaxIterations()) or (
            gtsam.checkConvergence(params.getRelativeErrorTol(),
                                   params.getAbsoluteErrorTol(),
                                   params.getErrorTol(),
                                   current_error, new_error))
    optimize(optimizer, check_convergence, hook)
    return optimizer.values()