[SymForce] Add full stats to Python result object
This adds all the fields from `cc_sym.OptimizationStats` to
`Optimizer.Result` in Python. These are added as cached properties, since:

1) we don't want to expose the stats fields directly, since different
   Python types might make more sense for some of them

2) converting some of them from C++ can be expensive (e.g.
   best_linearization), so we'd rather only convert when actually
   requested

To allow these conversions to happen lazily, we keep the original stats
object as a private member.
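
For illustration only, here is a minimal standalone sketch of the lazy
cached-property pattern, using a made-up `FakeStats` stand-in rather than the
real `cc_sym.OptimizationStats`:

```python
from dataclasses import dataclass
from functools import cached_property


class FakeStats:
    """Stand-in for cc_sym.OptimizationStats, purely for illustration."""

    best_index = 0
    early_exited = True


@dataclass
class Result:
    # Private field holding the original stats; fields are exposed lazily below
    _stats: FakeStats

    @cached_property
    def best_index(self) -> int:
        # Pulled out of the stats only on first access, then cached on the instance
        return self._stats.best_index

    @cached_property
    def early_exited(self) -> bool:
        return self._stats.early_exited


result = Result(_stats=FakeStats())
assert result.best_index == 0  # first access reads from _stats and caches
assert result.early_exited  # each property is converted independently
```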

It would be good to eventually have a test that the Result exposes
everything the OptimizationStats does, so the two don't get out of sync
again; that's not done here.
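
Purely as a hypothetical sketch (not part of this change), such a check could
compare public attribute names, assuming `get_lcm_type` is the only stats
method that shouldn't have a Result counterpart:

```python
from symforce import cc_sym
from symforce.opt.optimizer import Optimizer


def check_result_covers_stats() -> None:
    # Every public attribute on the C++ stats object should have a matching
    # attribute (field or cached property) on the Python Result
    stats_fields = {
        name
        for name in dir(cc_sym.OptimizationStats)
        if not name.startswith("_") and name != "get_lcm_type"
    }
    missing = stats_fields - set(dir(Optimizer.Result))
    assert not missing, f"Optimizer.Result is missing: {missing}"
```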

Also passes the other arguments of the underlying C++ optimize through
`Optimizer.optimize`, i.e. `num_iterations` and
`populate_best_linearization`.
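
A usage sketch of the pass-through arguments, assuming an `optimizer` and
`initial_values` constructed as in the examples touched below:

```python
# Cap the number of iterations and ask for the best linearization to be
# populated in the stats
result = optimizer.optimize(
    initial_values,
    num_iterations=50,
    populate_best_linearization=True,
)

print(f"Num iterations: {len(result.iterations) - 1}")
print(f"Final error: {result.error():.6f}")

# Only converted from the underlying cc_sym stats when accessed
best_linearization = result.best_linearization
```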

Topic: sf-stats
Reviewers: bradley,nathan,ryan-b,chao,hayk
GitOrigin-RevId: 751e3aa4095d8d2daed2d0c777902b84818e83b8
aaron-skydio committed Mar 16, 2023
1 parent dfa4d18 commit 9266eb5
Showing 11 changed files with 93 additions and 34 deletions.
6 changes: 2 additions & 4 deletions symforce/examples/robot_2d_localization/plotting.py
@@ -20,9 +20,7 @@ def plot_solution(optimizer: Optimizer, result: Optimizer.Result, animated: bool
matplotlib animation instead of providing an interactive slider.
"""
# Pull out values from the result
values_per_iter = [
optimizer.load_iteration_values(stats.values) for stats in result.iteration_stats
]
values_per_iter = [optimizer.load_iteration_values(stats.values) for stats in result.iterations]

# Create the layout
fig = plt.figure()
@@ -101,7 +99,7 @@ def update_plot(slider_value: np.float64) -> None:
num = int(slider_value)

# Set iteration text and abort if we rejected this iteration
stats = result.iteration_stats[num]
stats = result.iterations[num]
if num > 0 and not stats.update_accepted:
text.set_text(f"Iteration: {num} (rejected)\nError: {stats.new_error:.6f}")
return
@@ -122,7 +122,7 @@ def main() -> None:
result = optimizer.optimize(initial_values)

# Print some values
print(f"Num iterations: {len(result.iteration_stats) - 1}")
print(f"Num iterations: {len(result.iterations) - 1}")
print(f"Final error: {result.error():.6f}")

for i, pose in enumerate(result.optimized_values["poses"]):
6 changes: 2 additions & 4 deletions symforce/examples/robot_3d_localization/plotting.py
@@ -24,9 +24,7 @@ def plot_solution(
matplotlib animation instead of providing an interactive slider.
"""
# Pull out values from the result
values_per_iter = [
optimizer.load_iteration_values(stats.values) for stats in result.iteration_stats
]
values_per_iter = [optimizer.load_iteration_values(stats.values) for stats in result.iterations]

# Create the layout
fig = plt.figure()
@@ -109,7 +107,7 @@ def update_plot(slider_value: np.float64) -> None:

# Set iteration text and abort if we rejected this iteration
if show_iteration_text:
stats = result.iteration_stats[num]
stats = result.iterations[num]
if num > 0 and not stats.update_accepted:
text.set_text(f"Iteration: {num} (rejected)\nError: {stats.new_error:.1f}")
return
@@ -249,7 +249,7 @@ def main() -> None:
result = optimizer.optimize(values)

# Print some values
print(f"Num iterations: {len(result.iteration_stats) - 1}")
print(f"Num iterations: {len(result.iterations) - 1}")
print(f"Final error: {result.error():.6f}")

for i, pose in enumerate(result.optimized_values["world_T_body"]):
2 changes: 2 additions & 0 deletions symforce/opt/optimization_stats.h
@@ -25,6 +25,8 @@ struct OptimizationStats {
// good step)
bool early_exited{false};

// The linearization at best_index (at optimized_values), filled out if
// populate_best_linearization=true
optional<Linearization<Scalar>> best_linearization{};

sparse_matrix_structure_t jacobian_sparsity;
91 changes: 73 additions & 18 deletions symforce/opt/optimizer.py
@@ -6,11 +6,16 @@
from __future__ import annotations

import dataclasses
import warnings
from dataclasses import dataclass
from functools import cached_property

import numpy as np

from lcmtypes.sym._index_entry_t import index_entry_t
from lcmtypes.sym._optimization_iteration_t import optimization_iteration_t
from lcmtypes.sym._optimizer_params_t import optimizer_params_t
from lcmtypes.sym._sparse_matrix_structure_t import sparse_matrix_structure_t
from lcmtypes.sym._values_t import values_t

from symforce import cc_sym
@@ -112,29 +117,79 @@ class Result:
optimized_values:
The best Values achieved during the optimization (Values with the smallest error)
iteration_stats:
iterations:
Per-iteration stats, if requested, like the error per iteration. If debug stats are
turned on, also the Values and linearization per iteration.
best_index:
The index into iterations for the iteration that produced the smallest error. I.e.
`result.iterations[best_index].values == optimized_values`. This is not guaranteed
to be the last iteration, if the optimizer tried additional steps which did not reduce
the error
early_exited:
Did the optimization early exit? This can happen because it converged successfully,
or because it was unable to make progress
best_index:
The index into iteration_stats for the iteration that produced the smallest error. I.e.
`result.iteration_stats[best_index].values == optimized_values`. This is not guaranteed
to be the last iteration, if the optimizer tried additional steps which did not reduce
the error
best_linearization:
The linearization at best_index (at optimized_values), filled out if
populate_best_linearization=True
jacobian_sparsity:
The sparsity pattern of the jacobian, filled out if debug_stats=True
linear_solver_ordering:
The ordering used for the linear solver, filled out if debug_stats=True
cholesky_factor_sparsity:
The sparsity pattern of the cholesky factor L, filled out if debug_stats=True
"""

initial_values: Values
optimized_values: Values
iteration_stats: T.Sequence[optimization_iteration_t]
early_exited: bool
best_index: int

# Private field holding the original stats - we expose fields of this through properties,
# since some of the conversions out of this are expensive
_stats: cc_sym.OptimizationStats

@cached_property
def iterations(self) -> T.List[optimization_iteration_t]:
return self._stats.iterations

@cached_property
def best_index(self) -> int:
return self._stats.best_index

@cached_property
def early_exited(self) -> bool:
return self._stats.early_exited

@cached_property
def best_linearization(self) -> T.Optional[cc_sym.Linearization]:
return self._stats.best_linearization

@cached_property
def jacobian_sparsity(self) -> sparse_matrix_structure_t:
return self._stats.jacobian_sparsity

@cached_property
def linear_solver_ordering(self) -> np.ndarray:
return self._stats.linear_solver_ordering

@cached_property
def cholesky_factor_sparsity(self) -> sparse_matrix_structure_t:
return self._stats.cholesky_factor_sparsity

@cached_property
def iteration_stats(self) -> T.Sequence[optimization_iteration_t]:
warnings.warn("iteration_stats is deprecated, use iterations", FutureWarning)
return self.iterations

def error(self) -> float:
return self.iteration_stats[self.best_index].new_error
"""
The lowest error achieved by the optimization (the error at optimized_values)
"""
return self.iterations[self.best_index].new_error

def __init__(
self,
@@ -232,13 +287,17 @@ def _cc_values(self, values: Values) -> cc_sym.Values:

return cc_values

def optimize(self, initial_guess: Values) -> Optimizer.Result:
def optimize(self, initial_guess: Values, **kwargs: T.Any) -> Optimizer.Result:
"""
Optimize from the given initial guess, and return the optimized Values and stats
Args:
initial_guess: A Values containing the initial guess, should contain at least all the
keys required by the `factors` passed to the constructor
num_iterations: If < 0 (the default), uses the number of iterations specified by the
params at construction
populate_best_linearization: If true, the linearization at the best values will be
filled out in the stats
Returns:
The optimization results, with additional stats and debug information. See the
@@ -247,7 +306,7 @@ def optimize(self, initial_guess: Values) -> Optimizer.Result:
cc_values = self._cc_values(initial_guess)

try:
stats = self._cc_optimizer.optimize(cc_values)
stats = self._cc_optimizer.optimize(cc_values, **kwargs)
except ZeroDivisionError as ex:
raise ZeroDivisionError("ERROR: Division by zero - check your use of epsilon!") from ex

@@ -259,11 +318,7 @@ def optimize(self, initial_guess: Values) -> Optimizer.Result:
)

return Optimizer.Result(
initial_values=initial_guess,
optimized_values=optimized_values,
iteration_stats=stats.iterations,
best_index=stats.best_index,
early_exited=stats.early_exited,
initial_values=initial_guess, optimized_values=optimized_values, _stats=stats
)

def linearize(self, values: Values) -> cc_sym.Linearization:
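The deprecation shim above keeps existing `iteration_stats` callers working
while nudging them toward `iterations`; a small sketch of what that looks
like, assuming a `result` returned by `Optimizer.optimize`:

```python
import warnings

# New spelling
num_iterations = len(result.iterations)

# Old spelling still works, but emits a FutureWarning on first access
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    _ = result.iteration_stats
assert any(issubclass(w.category, FutureWarning) for w in caught)
```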
4 changes: 3 additions & 1 deletion symforce/pybind/cc_optimization_stats.cc
@@ -49,7 +49,9 @@ void AddOptimizationStatsWrapper(pybind11::module_ module) {
} else {
stats.best_linearization = *best_linearization;
}
})
},
"The linearization at best_index (at optimized_values), filled out if "
"populate_best_linearization=True")
.def("get_lcm_type", &sym::OptimizationStatsd::GetLcmType)
.def(py::pickle(
[](const sym::OptimizationStatsd& stats) { // __getstate__
6 changes: 5 additions & 1 deletion symforce/pybind/cc_sym.pyi
@@ -291,11 +291,15 @@ class OptimizationStats:
@property
def best_linearization(self) -> typing.Optional[Linearization]:
"""
The linearization at best_index (at optimized_values), filled out if populate_best_linearization=True
:type: object
"""
@best_linearization.setter
def best_linearization(self, arg1: Linearization) -> None:
pass
"""
The linearization at best_index (at optimized_values), filled out if populate_best_linearization=True
"""
@property
def cholesky_factor_sparsity(self) -> sparse_matrix_structure_t:
"""
2 changes: 1 addition & 1 deletion test/symforce_examples_robot_2d_localization_test.py
@@ -41,7 +41,7 @@ def test_optimize(self) -> None:
# Solve and return the result
result = optimizer.optimize(initial_values)

self.assertAlmostEqual(result.iteration_stats[0].new_error, 6.396357536315918)
self.assertAlmostEqual(result.iterations[0].new_error, 6.396357536315918)
self.assertLess(result.error(), 1e-3)


2 changes: 1 addition & 1 deletion test/symforce_examples_robot_3d_localization_test.py
@@ -42,7 +42,7 @@ def test_optimize(self) -> None:
# Solve and return the result
result = optimizer.optimize(values)

self.assertAlmostEqual(result.iteration_stats[0].new_error, 463700.5625)
self.assertAlmostEqual(result.iterations[0].new_error, 463700.5625)
self.assertLess(result.error(), 140)


4 changes: 2 additions & 2 deletions test/symforce_py_optimizer_test.py
@@ -66,11 +66,11 @@ def prior_residual(x: sf.Rot3, epsilon: sf.Scalar, x_prior: sf.Rot3) -> sf.V3:

logger.debug(f"Initial values: {result.initial_values}")
logger.debug(f"Optimized values: {result.optimized_values}")
logger.debug(f"Num iterations: {len(result.iteration_stats)}")
logger.debug(f"Num iterations: {len(result.iterations)}")
logger.debug(f"Final error: {result.error()}")

# Check values
self.assertEqual(len(result.iteration_stats), 7)
self.assertEqual(len(result.iterations), 7)
self.assertAlmostEqual(result.error(), 0.039, places=3)

# Check that we can pull out the variable blocks
