# logging_optimizer.py — Doxygen-extracted source listing; see the documentation of this file.
1 """
2 Optimization with logging via a hook.
3 Author: Jing Wu and Frank Dellaert
4 """
5 # pylint: disable=invalid-name
6 
7 from gtsam import NonlinearOptimizer, NonlinearOptimizerParams
8 import gtsam
9 from typing import Any, Callable
10 
# Maps each supported NonlinearOptimizer class to its default params class, so
# optimize_using() can construct default params when the caller passes none.
# NOTE(review): the extracted listing dropped three entries (embedded numbering
# jumps 11 -> 15); restored here to match the optimizers optimize_using's
# docstring advertises (e.g. GaussNewtonOptimizer).
OPTIMIZER_PARAMS_MAP = {
    gtsam.GaussNewtonOptimizer: gtsam.GaussNewtonParams,
    gtsam.LevenbergMarquardtOptimizer: gtsam.LevenbergMarquardtParams,
    gtsam.DoglegOptimizer: gtsam.DoglegParams,
    gtsam.GncGaussNewtonOptimizer: gtsam.GaussNewtonParams,
    gtsam.GncLMOptimizer: gtsam.LevenbergMarquardtParams
}
18 
19 
def optimize_using(OptimizerClass, hook, *args) -> gtsam.Values:
    """ Wraps the constructor and "optimize()" call for an Optimizer together and adds an iteration
        hook.
        Example usage:
        ```python
        def hook(optimizer, error):
            print("iteration {:}, error = {:}".format(optimizer.iterations(), error))
        solution = optimize_using(gtsam.GaussNewtonOptimizer, hook, graph, init, params)
        ```
        Iteration hook's args are (optimizer, error) and return type should be None

        Args:
            OptimizerClass (T): A NonlinearOptimizer class (e.g. GaussNewtonOptimizer,
                LevenbergMarquardtOptimizer)
            hook ([T, double] -> None): Function to callback after each iteration. Args are
                (optimizer, error) and return should be None.
            *args: Arguments that would be passed into the OptimizerClass constructor, usually:
                graph, init, [params]
        Returns:
            (gtsam.Values): A Values object representing the optimization solution.
    """
    # Add the iteration hook to the NonlinearOptimizerParams.
    # NOTE(review): the extracted listing dropped the isinstance guard (embedded
    # numbering jumps 42 -> 44); without it the hook would be set on args[0]
    # (the graph) and the for/else fallback would never run. Restored here.
    for arg in args:
        if isinstance(arg, gtsam.NonlinearOptimizerParams):
            # The lambda closes over `optimizer`, which is assigned below;
            # this is safe because the hook only fires after construction.
            arg.iterationHook = lambda iteration, error_before, error_after: hook(
                optimizer, error_after)
            break
    else:
        # No params supplied: create defaults for this optimizer type and
        # append them to the constructor arguments.
        params = OPTIMIZER_PARAMS_MAP[OptimizerClass]()
        params.iterationHook = lambda iteration, error_before, error_after: hook(
            optimizer, error_after)
        args = (*args, params)
    # Construct Optimizer and optimize
    optimizer = OptimizerClass(*args)
    hook(optimizer, optimizer.error())  # Call hook once with init values to match behavior below
    return optimizer.optimize()
56 
57 
def optimize(optimizer, check_convergence, hook):
    """ Given an optimizer and a convergence check, iterate until convergence.
        After each iteration, hook(optimizer, error) is called.
        After the function, use values and errors to get the result.
        Arguments:
            optimizer (T): needs an iterate and an error function.
            check_convergence: T * float * float -> bool
            hook -- hook function to record the error
    """
    # The optimizer is created with default values, which incur the error below;
    # report it once before iterating.
    previous_error = optimizer.error()
    hook(optimizer, previous_error)

    # Iterate manually: `params.iterationHook` cannot be used here because we
    # do not have access to the params object (backwards compatibility issue).
    converged = False
    while not converged:
        optimizer.iterate()
        latest_error = optimizer.error()
        hook(optimizer, latest_error)
        converged = check_convergence(optimizer, previous_error, latest_error)
        previous_error = latest_error
81 
82 
def gtsam_optimize(optimizer,
                   params,
                   hook):
    """ Given an optimizer and params, iterate until convergence.
        Recommend using optimize_using instead.
        After each iteration, hook(optimizer) is called.
        After the function, use values and errors to get the result.
        Arguments:
            optimizer {NonlinearOptimizer} -- Nonlinear optimizer
            params {NonlinearOptimizarParams} -- Nonlinear optimizer parameters
            hook -- hook function to record the error
    """
    def check_convergence(optimizer, current_error, new_error):
        # Stop once the iteration budget is exhausted.
        if optimizer.iterations() >= params.getMaxIterations():
            return True
        # Standard GTSAM convergence test on relative/absolute/total error.
        if gtsam.checkConvergence(params.getRelativeErrorTol(),
                                  params.getAbsoluteErrorTol(),
                                  params.getErrorTol(),
                                  current_error, new_error):
            return True
        # Levenberg-Marquardt only: stop when lambda exceeds its upper bound.
        return (isinstance(optimizer, gtsam.LevenbergMarquardtOptimizer)
                and optimizer.lambda_() > params.getlambdaUpperBound())

    optimize(optimizer, check_convergence, hook)
    return optimizer.values()
# ---- Doxygen cross-reference index (autogenerated on Tue Jul 4 2023 02:34:34) ----
# def gtsam_optimize(optimizer, params, hook)
# def optimize_using(OptimizerClass, hook, *args)
# def optimize(optimizer, check_convergence, hook)
# bool isinstance(handle obj)  — defined in pytypes.h:700
# GTSAM_EXPORT bool checkConvergence(const NonlinearOptimizerParams &params,
#                                    double currentError, double newError)
# Namespace: gtsam