Allow passing of ScalingOptions to scaling argument
timmens committed Jul 30, 2024
1 parent 03ec822 commit 2fc41ff
Showing 14 changed files with 113 additions and 54 deletions.
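
In short: minimize and maximize now accept a ScalingOptions instance via the scaling argument, while the old scaling_options dict is deprecated. A minimal before/after sketch, modeled on the tests added in this commit (the criterion, params, and algorithm name are illustrative placeholders, not taken from the diff):

import numpy as np
from optimagic.optimization.optimize import minimize
from optimagic.options import ScalingOptions

def sphere(x):
    return x @ x  # placeholder criterion

# Old style: still works, but now emits a FutureWarning (removal planned for 0.6.0).
res = minimize(
    fun=sphere,
    params=np.arange(5.0),
    algorithm="scipy_lbfgsb",
    scaling=True,
    scaling_options={"method": "start_values", "clipping_value": 0.5},
)

# New style: pass the typed options object directly.
res = minimize(
    fun=sphere,
    params=np.arange(5.0),
    algorithm="scipy_lbfgsb",
    scaling=ScalingOptions(method="start_values", clipping_value=0.5),
)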
1 change: 0 additions & 1 deletion src/estimagic/estimate_ml.py
@@ -224,7 +224,6 @@ def estimate_ml(
func_eval=loglike_eval,
primary_key="contributions",
scaling=False,
scaling_options=None,
derivative_eval=jacobian_eval,
)

1 change: 0 additions & 1 deletion src/estimagic/estimate_msm.py
@@ -274,7 +274,6 @@ def helper(params):
func_eval=func_eval,
primary_key="contributions",
scaling=False,
scaling_options=None,
derivative_eval=jacobian_eval,
)

10 changes: 10 additions & 0 deletions src/optimagic/deprecations.py
@@ -60,6 +60,16 @@ def throw_criterion_and_derivative_kwargs_future_warning():
warnings.warn(msg, FutureWarning)


def throw_scaling_options_future_warning():
msg = (
"The `scaling_options` argument will be deprecated in favor of `scaling` in "
"optimagic version 0.6.0 and later. You can simply pass the scaling options to "
"`scaling` instead of `scaling_options`. Using `scaling_options` will become "
"an error in optimagic version 0.6.0 and later."
)
warnings.warn(msg, FutureWarning)


def replace_and_warn_about_deprecated_algo_options(algo_options):
if not isinstance(algo_options, dict):
return algo_options
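
How the deprecation surfaces to users, as a hedged sketch (the criterion, parameters, and algorithm name are placeholders; only the warning mechanism comes from this commit):

import numpy as np
import pytest
from optimagic.optimization.optimize import minimize

def test_scaling_options_emit_future_warning():
    # The match string targets the message added in deprecations.py above.
    with pytest.warns(FutureWarning, match="scaling_options"):
        minimize(
            fun=lambda x: x @ x,  # placeholder criterion
            params=np.arange(3.0),
            algorithm="scipy_lbfgsb",
            scaling=True,
            scaling_options={"clipping_value": 0.2},
        )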
4 changes: 2 additions & 2 deletions src/optimagic/optimization/check_arguments.py
@@ -2,6 +2,7 @@
from pathlib import Path

from optimagic.shared.check_option_dicts import check_numdiff_options
from optimagic.options import ScalingOptions


def check_optimize_kwargs(**kwargs):
@@ -22,8 +23,7 @@ def check_optimize_kwargs(**kwargs):
"error_handling": str,
"error_penalty": dict,
"cache_size": (int, float),
"scaling": bool,
"scaling_options": dict,
"scaling": (bool, ScalingOptions),
"multistart": bool,
"multistart_options": dict,
}
39 changes: 35 additions & 4 deletions src/optimagic/optimization/optimize.py
@@ -44,6 +44,7 @@
)
from optimagic import deprecations
from optimagic.deprecations import replace_and_warn_about_deprecated_algo_options
from optimagic.options import ScalingOptions


def maximize(
@@ -350,6 +351,9 @@ def _optimize(
else fun_and_jac_kwargs
)

if scaling_options is not None:
deprecations.throw_scaling_options_future_warning()

algo_options = replace_and_warn_about_deprecated_algo_options(algo_options)

# ==================================================================================
@@ -452,7 +456,7 @@ def _optimize(
raise NotImplementedError(msg)

# ==================================================================================
# Set default values and check options
# Set default values, consolidate deprecated options, and check options
# ==================================================================================
fun_kwargs = _setdefault(fun_kwargs, {})
constraints = _setdefault(constraints, [])
@@ -461,12 +465,13 @@
fun_and_jac_kwargs = _setdefault(fun_and_jac_kwargs, {})
numdiff_options = _setdefault(numdiff_options, {})
log_options = _setdefault(log_options, {})
scaling_options = _setdefault(scaling_options, {})
error_penalty = _setdefault(error_penalty, {})
multistart_options = _setdefault(multistart_options, {})
if logging:
logging = Path(logging)

scaling = _consolidate_scaling_options(scaling, scaling_options)

if not skip_checks:
check_optimize_kwargs(
direction=direction,
@@ -486,7 +491,6 @@
error_handling=error_handling,
error_penalty=error_penalty,
scaling=scaling,
scaling_options=scaling_options,
multistart=multistart,
multistart_options=multistart_options,
)
@@ -620,7 +624,6 @@ def _optimize(
func_eval=first_crit_eval,
primary_key=algo_info.primary_criterion_entry,
scaling=scaling,
scaling_options=scaling_options,
derivative_eval=used_deriv,
soft_lower_bounds=soft_lower_bounds,
soft_upper_bounds=soft_upper_bounds,
@@ -878,6 +881,34 @@ def _setdefault(candidate, default):
return out


def _consolidate_scaling_options(scaling, scaling_options):
"""Consolidate scaling options."""
if isinstance(scaling, ScalingOptions) and scaling_options is not None:
msg = (
"You can not provide options through scaling and scaling_options. The "
"scaling_options argument is deprecated in favor of the scaling argument."
"You can pass options to the scaling argument directly using the "
"ScalingOptions class."
)
raise ValueError(msg)

if isinstance(scaling, bool):
if scaling and scaling_options is None:
scaling = ScalingOptions()
elif scaling:
try:
scaling = ScalingOptions(**scaling_options)
except TypeError as e:
msg = (
"The scaling_options argument contains invalid keys, and is "
"deprecated in favor of the scaling argument. You can pass options "
"to the scaling argument directly using the ScalingOptions class."
)
raise ValueError(msg) from e

return scaling
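
For reference, the mapping implemented by _consolidate_scaling_options, as a sketch (return values follow from the code above; the FutureWarning for a non-None scaling_options is emitted earlier in _optimize):

_consolidate_scaling_options(False, None)          # -> False: scaling stays off
_consolidate_scaling_options(False, {"magnitude": 2.0})  # -> False: dict is ignored
_consolidate_scaling_options(True, None)           # -> ScalingOptions() with defaults
_consolidate_scaling_options(True, {"magnitude": 2.0})   # -> ScalingOptions(magnitude=2.0)
_consolidate_scaling_options(True, {"bad_key": 1})       # -> ValueError: invalid key
_consolidate_scaling_options(ScalingOptions(), {"magnitude": 2.0})  # -> ValueError: both channels
_consolidate_scaling_options(ScalingOptions(), None)     # -> the instance, unchanged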


def _fill_multistart_options_with_defaults(options, params, x, params_to_internal):
"""Fill options for multistart optimization with defaults."""
defaults = {
9 changes: 9 additions & 0 deletions src/optimagic/options.py
@@ -0,0 +1,9 @@
from dataclasses import dataclass
from typing import Literal


@dataclass
class ScalingOptions:
method: Literal["start_values", "bounds"] = "start_values"
clipping_value: float = 0.1
magnitude: float = 1.0
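
A quick usage sketch of the new dataclass; the defaults mirror the hard-coded defaults that calculate_scaling_factor_and_offset loses in scale_conversion.py below:

from optimagic.options import ScalingOptions

options = ScalingOptions()  # method="start_values", clipping_value=0.1, magnitude=1.0
options = ScalingOptions(method="start_values", clipping_value=0.5)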
2 changes: 0 additions & 2 deletions src/optimagic/parameters/constraint_tools.py
@@ -23,7 +23,6 @@ def count_free_params(params, constraints=None, lower_bounds=None, upper_bounds=
func_eval=3,
primary_key="value",
scaling=False,
scaling_options={},
)

return int(internal_params.free_mask.sum())
@@ -52,5 +51,4 @@ def check_constraints(params, constraints, lower_bounds=None, upper_bounds=None)
func_eval=3,
primary_key="value",
scaling=False,
scaling_options={},
)
6 changes: 2 additions & 4 deletions src/optimagic/parameters/conversion.py
@@ -18,7 +18,6 @@ def get_converter(
func_eval,
primary_key,
scaling,
scaling_options,
derivative_eval=None,
soft_lower_bounds=None,
soft_upper_bounds=None,
@@ -46,8 +45,8 @@
primary_key (str): One of "value", "contributions" and "root_contributions".
Used to determine how the function and derivative output has to be
transformed for the optimizer.
scaling (bool): Whether scaling should be performed.
scaling_options (dict): User provided scaling options.
scaling (bool | ScalingOptions): Scaling options. If False, no scaling is
performed.
derivative_eval (dict, pytree or None): Evaluation of the derivative of
func at params. Used for consistency checks.
soft_lower_bounds (pytree): As lower_bounds
@@ -105,7 +104,6 @@
scale_converter, scaled_params = get_scale_converter(
internal_params=internal_params,
scaling=scaling,
scaling_options=scaling_options,
)

def _params_to_internal(params):
45 changes: 21 additions & 24 deletions src/optimagic/parameters/scale_conversion.py
@@ -1,25 +1,30 @@
from functools import partial
from typing import NamedTuple, Callable
from typing import NamedTuple, Callable, Literal

import numpy as np

from optimagic.parameters.space_conversion import InternalParams
from optimagic.options import ScalingOptions


class ScaleConverter(NamedTuple):
params_to_internal: Callable
params_from_internal: Callable
derivative_to_internal: Callable
derivative_from_internal: Callable


def get_scale_converter(
internal_params,
scaling,
scaling_options,
):
internal_params: InternalParams,
scaling: Literal[False] | ScalingOptions,
) -> tuple[ScaleConverter, InternalParams]:
"""Get a converter between scaled and unscaled parameters.
Args:
internal_params (InternalParams): NamedTuple of internal and possibly
reparametrized but not yet scaled parameter values and bounds.
func (callable): The criterion function. Possibly used to calculate a scaling
factor.
scaling (bool): Whether scaling should be done.
scaling_options (dict): User provided scaling options.
scaling (Literal[False] | ScalingOptions): Scaling options. If False, no scaling
is performed.
Returns:
ScaleConverter: NamedTuple with methods to convert between scaled and unscaled
@@ -39,12 +44,11 @@ def get_scale_converter(
if not scaling:
return _fast_path_scale_converter(), internal_params

scaling_options = {} if scaling_options is None else scaling_options
valid_keys = {"method", "clipping_value", "magnitude"}
scaling_options = {k: v for k, v in scaling_options.items() if k in valid_keys}

factor, offset = calculate_scaling_factor_and_offset(
internal_params=internal_params, **scaling_options
internal_params=internal_params,
method=scaling.method,
clipping_value=scaling.clipping_value,
magnitude=scaling.magnitude,
)

_params_to_internal = partial(
@@ -94,13 +98,6 @@ def _derivative_from_internal(derivative):
return converter, params


class ScaleConverter(NamedTuple):
params_to_internal: Callable
params_from_internal: Callable
derivative_to_internal: Callable
derivative_from_internal: Callable


def _fast_path_scale_converter():
converter = ScaleConverter(
params_to_internal=lambda x: x,
@@ -113,9 +110,9 @@ def _fast_path_scale_converter():

def calculate_scaling_factor_and_offset(
internal_params,
method="start_values",
clipping_value=0.1,
magnitude=1,
method,
clipping_value,
magnitude,
):
x = internal_params.values
lower_bounds = internal_params.lower_bounds
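
A sketch of the changed call style for get_scale_converter; internal_params is assumed to be an InternalParams built elsewhere via space_conversion:

from optimagic.options import ScalingOptions
from optimagic.parameters.scale_conversion import get_scale_converter

# With options: parameters and bounds are rescaled.
converter, scaled_params = get_scale_converter(
    internal_params=internal_params,
    scaling=ScalingOptions(method="start_values", clipping_value=0.5),
)

# Without scaling: the fast path returns identity conversions,
# e.g. converter.params_to_internal(x) returns x unchanged.
converter, params = get_scale_converter(internal_params=internal_params, scaling=False)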
1 change: 0 additions & 1 deletion src/optimagic/visualization/slice_plot.py
@@ -104,7 +104,6 @@ def slice_plot(
func_eval=func_eval,
primary_key="value",
scaling=False,
scaling_options=None,
)

n_params = len(internal_params.values)
@@ -74,7 +74,6 @@ def test_criterion_and_derivative_template(
func_eval=crit(base_inputs["params"]),
primary_key="value",
scaling=False,
scaling_options=None,
derivative_eval=None,
)
inputs = {k: v for k, v in base_inputs.items() if k != "params"}
@@ -123,7 +122,6 @@ def test_internal_criterion_with_penalty(base_inputs, direction):
func_eval=sos_scalar_criterion(base_inputs["params"]),
primary_key="value",
scaling=False,
scaling_options=None,
derivative_eval=None,
)
inputs = {k: v for k, v in base_inputs.items() if k != "params"}
27 changes: 26 additions & 1 deletion tests/optimagic/optimization/test_optimizations_with_scaling.py
@@ -4,6 +4,7 @@

import numpy as np
import pandas as pd
from optimagic.options import ScalingOptions
import pytest
from optimagic.config import IS_PYBOBYQA_INSTALLED
from optimagic.optimization.optimize import minimize
@@ -32,7 +33,7 @@


@pytest.mark.parametrize("algorithm, scaling_options", PARAMETRIZATION)
def test_optimizations_with_scaling(algorithm, scaling_options):
def test_optimizations_with_scaling_via_dict_options(algorithm, scaling_options):
params = pd.DataFrame()
params["value"] = np.arange(5)
params["lower_bound"] = [-1, 0, 0, 0, 0]
@@ -52,3 +53,27 @@ def test_optimizations_with_scaling(algorithm, scaling_options):

expected_solution = np.array([0, 0, 0, 3, 4])
aaae(res.params["value"].to_numpy(), expected_solution)


@pytest.mark.parametrize("algorithm, scaling_options", PARAMETRIZATION)
def test_optimizations_with_scaling(algorithm, scaling_options):
params = pd.DataFrame()
params["value"] = np.arange(5)
params["lower_bound"] = [-1, 0, 0, 0, 0]
params["upper_bound"] = np.full(5, 10)

constraints = [{"loc": [3, 4], "type": "fixed"}]

scaling = ScalingOptions(**scaling_options)

res = minimize(
fun=sos_scalar_criterion,
params=params,
constraints=constraints,
algorithm=algorithm,
scaling=scaling,
jac=sos_gradient,
)

expected_solution = np.array([0, 0, 0, 3, 4])
aaae(res.params["value"].to_numpy(), expected_solution)
7 changes: 2 additions & 5 deletions tests/optimagic/parameters/test_conversion.py
@@ -1,4 +1,5 @@
import numpy as np
from optimagic.options import ScalingOptions
import pytest
from optimagic.parameters.conversion import (
_is_fast_deriv_eval,
@@ -19,7 +20,6 @@ def test_get_converter_fast_case():
derivative_eval=2 * np.arange(3),
primary_key="value",
scaling=False,
scaling_options=None,
)

aaae(internal.values, np.arange(3))
@@ -45,7 +45,6 @@ def test_get_converter_with_constraints_and_bounds():
derivative_eval=2 * np.arange(3),
primary_key="value",
scaling=False,
scaling_options=None,
)

aaae(internal.values, np.arange(2))
@@ -70,8 +69,7 @@ def test_get_converter_with_scaling():
func_eval=3,
derivative_eval=2 * np.arange(3),
primary_key="value",
scaling=True,
scaling_options={"method": "start_values", "clipping_value": 0.5},
scaling=ScalingOptions(method="start_values", clipping_value=0.5),
)

aaae(internal.values, np.array([0, 1, 1]))
@@ -98,7 +96,6 @@ def test_get_converter_with_trees():
derivative_eval={"a": 0, "b": 2, "c": 4},
primary_key="value",
scaling=False,
scaling_options=None,
)

aaae(internal.values, np.arange(3))
