diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index 15a675a6..f71f5953 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -22,7 +22,7 @@ from easyscience.Utils.classTools import addLoggedProp from .core import ComponentSerializer -from .new_variable import Parameter as new_Parameter +from .new_variable import Parameter as NewParameter from .new_variable.descriptor_base import DescriptorBase from .Variable import Descriptor from .Variable import Parameter @@ -161,7 +161,7 @@ def constraints(self) -> List[C]: return constraints ## TODO clean when full move to new_variable - def get_parameters(self) -> Union[List[Parameter], List[new_Parameter]]: + def get_parameters(self) -> Union[List[Parameter], List[NewParameter]]: """ Get all parameter objects as a list. @@ -171,7 +171,7 @@ def get_parameters(self) -> Union[List[Parameter], List[new_Parameter]]: for key, item in self._kwargs.items(): if hasattr(item, 'get_parameters'): par_list = [*par_list, *item.get_parameters()] - elif isinstance(item, Parameter) or isinstance(item, new_Parameter): + elif isinstance(item, Parameter) or isinstance(item, NewParameter): par_list.append(item) return par_list @@ -191,7 +191,7 @@ def _get_linkable_attributes(self) -> List[V]: return item_list ## TODO clean when full move to new_variable - def get_fit_parameters(self) -> Union[List[Parameter], List[new_Parameter]]: + def get_fit_parameters(self) -> Union[List[Parameter], List[NewParameter]]: """ Get all objects which can be fitted (and are not fixed) as a list. @@ -201,7 +201,7 @@ def get_fit_parameters(self) -> Union[List[Parameter], List[new_Parameter]]: for key, item in self._kwargs.items(): if hasattr(item, 'get_fit_parameters'): fit_list = [*fit_list, *item.get_fit_parameters()] - elif isinstance(item, Parameter) or isinstance(item, new_Parameter): + elif isinstance(item, Parameter) or isinstance(item, NewParameter): if item.enabled and not item.fixed: fit_list.append(item) return fit_list diff --git a/src/easyscience/fitting/minimizers/minimizer_base.py b/src/easyscience/fitting/minimizers/minimizer_base.py index fb4ce8bb..35cf597f 100644 --- a/src/easyscience/fitting/minimizers/minimizer_base.py +++ b/src/easyscience/fitting/minimizers/minimizer_base.py @@ -5,49 +5,61 @@ from abc import ABCMeta from abc import abstractmethod from typing import Callable +from typing import Dict +from typing import List from typing import Optional +from typing import Tuple from typing import Union import numpy as np +from easyscience.Objects.ObjectClasses import BaseObj +from easyscience.Objects.Variable import Parameter + +from ..Constraints import ObjConstraint +from .utils import FitError from .utils import FitResults +MINIMIZER_PARAMETER_PREFIX = 'p' + class MinimizerBase(metaclass=ABCMeta): """ - This template class is the basis for all fitting engines in `EasyScience`. + This template class is the basis for all minimizer engines in `EasyScience`. 
""" wrapping: str = None - def __init__(self, obj, fit_function: Callable, method: Optional[str] = None): + def __init__(self, obj: BaseObj, fit_function: Callable, method: Optional[str] = None): + if method not in self.available_methods(): + raise FitError(f'Method {method} not available in {self.__class__}') self._object = obj self._original_fit_function = fit_function self._method = method - self._cached_pars = {} - self._cached_pars_vals = {} + self._cached_pars: Dict[str, Parameter] = {} + self._cached_pars_vals: Dict[str, Tuple[float]] = {} self._cached_model = None self._fit_function = None self._constraints = [] @property - def all_constraints(self) -> list: + def all_constraints(self) -> List[ObjConstraint]: return [*self._constraints, *self._object._constraints] - def fit_constraints(self) -> list: + def fit_constraints(self) -> List[ObjConstraint]: return self._constraints - def set_fit_constraint(self, constraints): + def set_fit_constraint(self, constraints: List[ObjConstraint]): self._constraints = constraints - def add_fit_constraint(self, constraint): + def add_fit_constraint(self, constraint: ObjConstraint): self._constraints.append(constraint) - def remove_fit_constraint(self, index: int): + def remove_fit_constraint(self, index: int) -> None: del self._constraints[index] @abstractmethod - def make_model(self, pars=None): + def make_model(self, pars: List[Parameter] = None): """ Generate an engine model from the supplied `fit_function` and parameters in the base object. @@ -66,7 +78,7 @@ def fit( self, x: np.ndarray, y: np.ndarray, - weights: Optional[Union[np.ndarray]] = None, + weights: Optional[np.ndarray] = None, model=None, parameters=None, method=None, @@ -89,37 +101,52 @@ def fit( :return: Fit results """ - def evaluate(self, x: np.ndarray, parameters: dict = None, **kwargs) -> np.ndarray: + def evaluate(self, x: np.ndarray, minimizer_parameters: dict[str, float] = None, **kwargs) -> np.ndarray: """ Evaluate the fit function for values of x. Parameters used are either the latest or user supplied. If the parameters are user supplied, it must be in a dictionary of {'parameter_name': parameter_value,...}. :param x: x values for which the fit function will be evaluated :type x: np.ndarray - :param parameters: Dictionary of parameters which will be used in the fit function. They must be in a dictionary + :param minimizer_parameters: Dictionary of parameters which will be used in the fit function. They must be in a dictionary of {'parameter_name': parameter_value,...} - :type parameters: dict + :type minimizer_parameters: dict :param kwargs: additional arguments :return: y values calculated at points x for a set of parameters. :rtype: np.ndarray - """ + """ # noqa: E501 + if minimizer_parameters is None: + minimizer_parameters = {} + if not isinstance(minimizer_parameters, dict): + raise TypeError("minimizer_parameters must be a dictionary") + if self._fit_function is None: # This will also generate self._cached_pars self._fit_function = self._generate_fit_function() - if not isinstance(parameters, (dict, type(None))): - raise AttributeError + minimizer_parameters = self._prepare_parameters(minimizer_parameters) + + return self._fit_function(x, **minimizer_parameters, **kwargs) + + def _prepare_parameters(self, parameters: dict[str, float]) -> dict[str, float]: + """ + Prepare the parameters for the minimizer. + :param parameters: Dict of parameters for the minimizer with names as keys. 
+ """ pars = self._cached_pars - new_parameters = parameters - if new_parameters is None: - new_parameters = {} + for name, item in pars.items(): - fit_name = 'p' + str(name) - if fit_name not in new_parameters.keys(): - new_parameters[fit_name] = item.raw_value + parameter_name = MINIMIZER_PARAMETER_PREFIX + str(name) + if parameter_name not in parameters.keys(): + ## TODO clean when full move to new_variable + from easyscience.Objects.new_variable import Parameter as NewParameter - return self._fit_function(x, **new_parameters, **kwargs) + if isinstance(item, NewParameter): + parameters[parameter_name] = item.value + else: + parameters[parameter_name] = item.raw_value + return parameters @abstractmethod def convert_to_pars_obj(self, par_list: Optional[Union[list]] = None): @@ -131,9 +158,18 @@ def convert_to_pars_obj(self, par_list: Optional[Union[list]] = None): :return: engine Parameters compatible object """ + @abstractmethod + def available_methods(self) -> List[str]: + """ + Return a list of available methods for the engine. + + :return: List of available methods + :rtype: List[str] + """ + @staticmethod @abstractmethod - def convert_to_par_object(obj): + def convert_to_par_object(obj: BaseObj): """ Convert an `EasyScience.Objects.Base.Parameter` object to an engine Parameter object. """ @@ -149,7 +185,7 @@ def _set_parameter_fit_result(self, fit_result): """ @abstractmethod - def _gen_fit_results(self, fit_results, **kwargs) -> 'FitResults': + def _gen_fit_results(self, fit_results, **kwargs) -> FitResults: """ Convert fit results into the unified `FitResults` format. diff --git a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index 9fd0f966..10c500cc 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -14,6 +14,7 @@ from bumps.names import FitProblem from bumps.parameter import Parameter as BumpsParameter +from .minimizer_base import MINIMIZER_PARAMETER_PREFIX from .minimizer_base import MinimizerBase from .utils import FitError from .utils import FitResults @@ -57,10 +58,10 @@ def make_func(x, y, weights): par = {} if not pars: for name, item in obj._cached_pars.items(): - par['p' + str(name)] = obj.convert_to_par_object(item) + par[MINIMIZER_PARAMETER_PREFIX + str(name)] = obj.convert_to_par_object(item) else: for item in pars: - par['p' + item.unique_name] = obj.convert_to_par_object(item) + par[MINIMIZER_PARAMETER_PREFIX + item.unique_name] = obj.convert_to_par_object(item) return Curve(fit_func, x, y, dy=weights, **par) return make_func @@ -99,8 +100,7 @@ def fit_function(x: np.ndarray, **kwargs): for name, value in kwargs.items(): par_name = name[1:] if par_name in self._cached_pars.keys(): - - ## TODO clean when full move to new_variable + ## TODO clean when full move to new_variable from easyscience.Objects.new_variable import Parameter if isinstance(self._cached_pars[par_name], Parameter): @@ -109,7 +109,7 @@ def fit_function(x: np.ndarray, **kwargs): else: if self._cached_pars[par_name].raw_value != value: self._cached_pars[par_name].value = value - + # update_fun = self._cached_pars[par_name]._callback.fset # if update_fun: # update_fun(value) @@ -125,8 +125,9 @@ def fit_function(x: np.ndarray, **kwargs): # f = (x, a=1, b=2)... # Where we need to be generic. Note that this won't hold for much outside of this scope. 
- ## TODO clean when full move to new_variable + ## TODO clean when full move to new_variable from easyscience.Objects.new_variable import Parameter + if isinstance(parameter, Parameter): default_value = parameter.value else: @@ -137,7 +138,7 @@ def fit_function(x: np.ndarray, **kwargs): inspect.Parameter('x', inspect.Parameter.POSITIONAL_OR_KEYWORD, annotation=inspect._empty), *[ inspect.Parameter( - 'p' + str(name), + MINIMIZER_PARAMETER_PREFIX + str(name), inspect.Parameter.POSITIONAL_OR_KEYWORD, annotation=inspect._empty, default=default_value, @@ -204,8 +205,9 @@ def fit( model = model(x, y, weights) self._cached_model = model - ## TODO clean when full move to new_variable + ## TODO clean when full move to new_variable from easyscience.Objects.new_variable import Parameter + if isinstance(self._cached_pars[list(self._cached_pars.keys())[0]], Parameter): self._p_0 = {f'p{key}': self._cached_pars[key].value for key in self._cached_pars.keys()} else: @@ -252,16 +254,17 @@ def convert_to_par_object(obj) -> BumpsParameter: :return: bumps Parameter compatible object. :rtype: BumpsParameter """ - - ## TODO clean when full move to new_variable + + ## TODO clean when full move to new_variable from easyscience.Objects.new_variable import Parameter + if isinstance(obj, Parameter): value = obj.value else: value = obj.raw_value - + return BumpsParameter( - name='p' + obj.unique_name, + name=MINIMIZER_PARAMETER_PREFIX + obj.unique_name, value=value, bounds=[obj.min, obj.max], fixed=obj.fixed, @@ -314,8 +317,9 @@ def _gen_fit_results(self, fit_results, **kwargs) -> FitResults: ## TODO clean when full move to new_variable from easyscience.Objects.new_variable import Parameter + if isinstance(pars[dict_name], Parameter): - item[name] = pars[dict_name].value + item[name] = pars[dict_name].value else: item[name] = pars[dict_name].raw_value @@ -323,7 +327,7 @@ def _gen_fit_results(self, fit_results, **kwargs) -> FitResults: results.p = item results.x = self._cached_model.x results.y_obs = self._cached_model.y - results.y_calc = self.evaluate(results.x, parameters=results.p) + results.y_calc = self.evaluate(results.x, minimizer_parameters=results.p) results.y_err = self._cached_model.dy # results.residual = results.y_obs - results.y_calc # results.goodness_of_fit = np.sum(results.residual**2) diff --git a/src/easyscience/fitting/minimizers/minimizer_dfo.py b/src/easyscience/fitting/minimizers/minimizer_dfo.py index 641f9c10..ced5f107 100644 --- a/src/easyscience/fitting/minimizers/minimizer_dfo.py +++ b/src/easyscience/fitting/minimizers/minimizer_dfo.py @@ -10,6 +10,7 @@ import dfols import numpy as np +from .minimizer_base import MINIMIZER_PARAMETER_PREFIX from .minimizer_base import MinimizerBase from .utils import FitError from .utils import FitResults @@ -55,9 +56,9 @@ def make_func(x, y, weights): from easyscience.Objects.new_variable import Parameter if isinstance(item, Parameter): - par['p' + str(name)] = item.value + par[MINIMIZER_PARAMETER_PREFIX + str(name)] = item.value else: - par['p' + str(name)] = item.raw_value + par[MINIMIZER_PARAMETER_PREFIX + str(name)] = item.raw_value else: for item in pars: @@ -65,9 +66,9 @@ def make_func(x, y, weights): from easyscience.Objects.new_variable import Parameter if isinstance(item, Parameter): - par['p' + item.unique_name] = item.value + par[MINIMIZER_PARAMETER_PREFIX + item.unique_name] = item.value else: - par['p' + item.unique_name] = item.raw_value + par[MINIMIZER_PARAMETER_PREFIX + item.unique_name] = item.raw_value def residuals(x0) -> np.ndarray: 
for idx, par_name in enumerate(par.keys()): @@ -279,7 +280,7 @@ def _gen_fit_results(self, fit_results, weights, **kwargs) -> FitResults: results.p = item results.x = self._cached_model.x results.y_obs = self._cached_model.y - results.y_calc = self.evaluate(results.x, parameters=results.p) + results.y_calc = self.evaluate(results.x, minimizer_parameters=results.p) results.y_err = weights # results.residual = results.y_obs - results.y_calc # results.goodness_of_fit = fit_results.f @@ -295,14 +296,14 @@ def available_methods(self) -> List[str]: def dfols_fit(self, model: Callable, **kwargs): """ - Method to convert EasyScience styling to DFO-LS styling (yes, again) + Method to convert EasyScience styling to DFO-LS styling (yes, again) - :param model: Model which accepts f(x[0]) - :type model: Callable - :param kwargs: Any additional arguments for dfols.solver - :type kwargs: dict - :return: dfols fit results container - =""" + :param model: Model which accepts f(x[0]) + :type model: Callable + :param kwargs: Any additional arguments for dfols.solver + :type kwargs: dict + :return: dfols fit results container + """ ## TODO clean when full move to new_variable from easyscience.Objects.new_variable import Parameter @@ -316,5 +317,14 @@ def dfols_fit(self, model: Callable, **kwargs): np.array([par.min for par in iter(self._cached_pars.values())]), np.array([par.max for par in iter(self._cached_pars.values())]), ) - results = dfols.solve(model, x0, bounds=bounds, **kwargs) + # https://numericalalgorithmsgroup.github.io/dfols/build/html/userguide.html + if np.isinf(bounds).any(): + results = dfols.solve(model, x0, bounds=bounds, **kwargs) + else: + # It is only possible to scale (normalize) variables if they are bound (different from inf) + results = dfols.solve(model, x0, bounds=bounds, scaling_within_bounds=True, **kwargs) + + if 'Success' not in results.msg: + raise FitError(f'Fit failed with message: {results.msg}') + return results diff --git a/src/easyscience/fitting/minimizers/minimizer_lmfit.py b/src/easyscience/fitting/minimizers/minimizer_lmfit.py index 854c84fa..126ef814 100644 --- a/src/easyscience/fitting/minimizers/minimizer_lmfit.py +++ b/src/easyscience/fitting/minimizers/minimizer_lmfit.py @@ -2,10 +2,14 @@ # SPDX-License-Identifier: BSD-3-Clause # © 2021-2023 Contributors to the EasyScience project LMModel: """ Generate a lmfit model from the supplied `fit_function` and parameters in the base object. 
@@ -35,13 +57,15 @@ def make_model(self, pars: Optional[LMParameters] = None) -> LMModel: """ # Generate the fitting function fit_func = self._generate_fit_function() + self._fit_function = fit_func + if pars is None: pars = self._cached_pars # Create the model model = LMModel( fit_func, independent_vars=['x'], - param_names=['p' + str(key) for key in pars.keys()], + param_names=[MINIMIZER_PARAMETER_PREFIX + str(key) for key in pars.keys()], ) # Assign values from the `Parameter` to the model for name, item in pars.items(): @@ -56,7 +80,7 @@ def make_model(self, pars: Optional[LMParameters] = None) -> LMModel: else: value = item.raw_value - model.set_param_hint('p' + str(name), value=value, min=item.min, max=item.max) + model.set_param_hint(MINIMIZER_PARAMETER_PREFIX + str(name), value=value, min=item.min, max=item.max) # Cache the model for later reference self._cached_model = model @@ -70,8 +94,6 @@ def _generate_fit_function(self) -> Callable: :return: a fit function which is compatible with lmfit models :rtype: Callable """ - # Original fit function - func = self._original_fit_function # Get a list of `Parameters` self._cached_pars = {} self._cached_pars_vals = {} @@ -80,15 +102,15 @@ def _generate_fit_function(self) -> Callable: self._cached_pars[key] = parameter self._cached_pars_vals[key] = (parameter.value, parameter.error) - # Make a new fit function - def fit_function(x: np.ndarray, **kwargs): + # Make a lm fit function + def lm_fit_function(x: np.ndarray, **kwargs): """ - Wrapped fit function which now has a lmfit compatible form. + Fit function with a lmfit compatible signature. :param x: array of data points to be calculated :type x: np.ndarray :param kwargs: key word arguments - :return: points calculated at `x` + :return: points, `f(x)`, calculated at `x` :rtype: np.ndarray """ # Update the `Parameter` values and the callback if needed @@ -112,39 +134,12 @@ def fit_function(x: np.ndarray, **kwargs): # TODO Pre processing here for constraint in self.fit_constraints(): constraint() - return_data = func(x) + return_data = self._original_fit_function(x) # TODO Loading or manipulating data here return return_data - # Fake the function signature. - # This is done as lmfit wants the function to be in the form: - # f = (x, a=1, b=2)... - # Where we need to be generic. Note that this won't hold for much outside of this scope. 
- - ## TODO clean when full move to new_variable - from easyscience.Objects.new_variable import Parameter - - if isinstance(parameter, Parameter): - default_value = parameter.value - else: - default_value = parameter.raw_value - - params = [ - inspect.Parameter('x', inspect.Parameter.POSITIONAL_OR_KEYWORD, annotation=inspect._empty), - *[ - inspect.Parameter( - 'p' + str(name), - inspect.Parameter.POSITIONAL_OR_KEYWORD, - annotation=inspect._empty, - default=default_value, - ) - for name, parameter in self._cached_pars.items() - ], - ] - # Sign the function - fit_function.__signature__ = inspect.Signature(params) - self._fit_function = fit_function - return fit_function + lm_fit_function.__signature__ = _wrap_to_lm_signature(self._cached_pars) + return lm_fit_function def fit( self, @@ -182,8 +177,11 @@ def fit( default_method = {} if self._method is not None: default_method = {'method': self._method} - if method is not None and method in self.available_methods(): - default_method['method'] = method + if method is not None: + if method in self.available_methods(): + default_method['method'] = method + else: + raise FitError(f'Method {method} not available in {self.__class__}') if weights is None: weights = 1 / np.sqrt(np.abs(y)) @@ -216,35 +214,39 @@ def fit( raise FitError(e) return results - def convert_to_pars_obj(self, par_list: Optional[List] = None) -> LMParameters: + def convert_to_pars_obj(self, parameters: Optional[List[Union[Parameter, NewParameter]]] = None) -> LMParameters: """ Create an lmfit compatible container with the `Parameters` converted from the base object. - :param par_list: If only a single/selection of parameter is required. Specify as a list - :type par_list: List[str] + :param parameters: If only a single/selection of parameter is required. Specify as a list :return: lmfit Parameters compatible object - :rtype: LMParameters """ - if par_list is None: + if parameters is None: # Assume that we have a BaseObj for which we can obtain a list - par_list = self._object.get_fit_parameters() - pars_obj = LMParameters().add_many([self.__class__.convert_to_par_object(obj) for obj in par_list]) - return pars_obj + parameters = self._object.get_fit_parameters() + lm_parameters = LMParameters().add_many([self.convert_to_par_object(parameter) for parameter in parameters]) + return lm_parameters @staticmethod - def convert_to_par_object(obj) -> LMParameter: + def convert_to_par_object(parameter: Union[Parameter, NewParameter]) -> LMParameter: """ Convert an `EasyScience.Objects.Base.Parameter` object to a lmfit Parameter object. :return: lmfit Parameter compatible object. 
:rtype: LMParameter """ + ## TODO clean when full move to new_variable + if isinstance(parameter, NewParameter): + value = parameter.value + else: + value = parameter.raw_value + return LMParameter( - 'p' + obj.unique_name, - value=obj.raw_value, - vary=not obj.fixed, - min=obj.min, - max=obj.max, + MINIMIZER_PARAMETER_PREFIX + parameter.unique_name, + value=value, + vary=not parameter.fixed, + min=parameter.min, + max=parameter.max, expr=None, brute_step=None, ) @@ -267,9 +269,9 @@ def _set_parameter_fit_result(self, fit_result: ModelResult, stack_status: bool) global_object.stack.enabled = True global_object.stack.beginMacro('Fitting routine') for name in pars.keys(): - pars[name].value = fit_result.params['p' + str(name)].value + pars[name].value = fit_result.params[MINIMIZER_PARAMETER_PREFIX + str(name)].value if fit_result.errorbars: - pars[name].error = fit_result.params['p' + str(name)].stderr + pars[name].error = fit_result.params[MINIMIZER_PARAMETER_PREFIX + str(name)].stderr else: pars[name].error = 0.0 if stack_status: @@ -321,3 +323,30 @@ def available_methods(self) -> List[str]: 'cobyla', 'bfgs', ] + + +def _wrap_to_lm_signature(parameters: Dict[int, Union[Parameter, NewParameter]]) -> Signature: + """ + Wrap the function signature. + This is done as lmfit wants the function to be in the form: + f = (x, a=1, b=2)... + Where we need to be generic. Note that this won't hold for much outside of this scope. + """ + wrapped_parameters = [] + wrapped_parameters.append(InspectParameter('x', InspectParameter.POSITIONAL_OR_KEYWORD, annotation=_empty)) + for name, parameter in parameters.items(): + ## TODO clean when full move to new_variable + if isinstance(parameter, NewParameter): + default_value = parameter.value + else: + default_value = parameter.raw_value + + wrapped_parameters.append( + InspectParameter( + MINIMIZER_PARAMETER_PREFIX + str(name), + InspectParameter.POSITIONAL_OR_KEYWORD, + annotation=_empty, + default=default_value, + ) + ) + return Signature(wrapped_parameters) diff --git a/tests/unit_tests/Fitting/minimizers/test_minimizer_base.py b/tests/unit_tests/Fitting/minimizers/test_minimizer_base.py new file mode 100644 index 00000000..dd6a29de --- /dev/null +++ b/tests/unit_tests/Fitting/minimizers/test_minimizer_base.py @@ -0,0 +1,115 @@ +import pytest + +from unittest.mock import MagicMock + +from easyscience.fitting.minimizers.minimizer_base import MinimizerBase +from easyscience.fitting.minimizers.utils import FitError + +class TestMinimizerBase(): + @pytest.fixture + def minimizer(self): + # This avoids the error: TypeError: Can't instantiate abstract class with abstract methods __init__ + MinimizerBase.__abstractmethods__ = set() + MinimizerBase.available_methods = MagicMock(return_value=['method']) + + minimizer = MinimizerBase( + obj='obj', + fit_function='fit_function', + method='method' + ) + return minimizer + + def test_init_exception(self): + # When Then + MinimizerBase.__abstractmethods__ = set() + MinimizerBase.available_methods = MagicMock(return_value=['method']) + # Expect + with pytest.raises(FitError): + MinimizerBase( + obj='obj', + fit_function='fit_function', + method='not_a_method' + ) + + def test_init(self, minimizer: MinimizerBase): + assert minimizer._object == 'obj' + assert minimizer._original_fit_function == 'fit_function' + assert minimizer._method == 'method' + assert minimizer._cached_pars == {} + assert minimizer._cached_pars_vals == {} + assert minimizer._cached_model == None + assert minimizer._fit_function == None + assert 
minimizer._constraints == [] + + def test_evaluate(self, minimizer: MinimizerBase): + # When + minimizer._fit_function = MagicMock(return_value='fit_function_return') + minimizer._prepare_parameters = MagicMock(return_value={'prepared_parms_key': 'prepared_parms_val'}) + + # Then + result = minimizer.evaluate('x', minimizer_parameters={'parms_key': 'parms_val'}, kwargs={'kwargs_key': 'kwargs_val'}) + + # Expect + assert result == 'fit_function_return' + minimizer._fit_function.assert_called_once_with('x', prepared_parms_key='prepared_parms_val', kwargs={'kwargs_key': 'kwargs_val'}) + minimizer._prepare_parameters.assert_called_once_with({'parms_key': 'parms_val'}) + + def test_evaluate_no_fit_function(self, minimizer: MinimizerBase): + # When + mock_fit_function = MagicMock() + minimizer._fit_function = None + minimizer._prepare_parameters = MagicMock(return_value={'prepared_parms_key': 'prepared_parms_val'}) + minimizer._generate_fit_function = MagicMock(return_value=mock_fit_function) + + # Then + minimizer.evaluate('x', minimizer_parameters={'parms_key': 'parms_val'}, kwargs={'kwargs_key': 'kwargs_val'}) + + # Expect + mock_fit_function.assert_called_once_with('x', prepared_parms_key='prepared_parms_val', kwargs={'kwargs_key': 'kwargs_val'}) + minimizer._prepare_parameters.assert_called_once_with({'parms_key': 'parms_val'}) + + def test_evaluate_no_parameters(self, minimizer: MinimizerBase): + # When + minimizer._fit_function = MagicMock(return_value='fit_function_return') + minimizer._prepare_parameters = MagicMock(return_value={'parms_key': 'parms_val'}) + + # Then + minimizer.evaluate('x') + + # Expect + minimizer._prepare_parameters.assert_called_once_with({}) + minimizer._fit_function.assert_called_once_with('x', parms_key='parms_val') + + def test_evaluate_exception(self, minimizer: MinimizerBase): + # When + minimizer_parameters = 'not dict type' + + # Then Expect + with pytest.raises(TypeError): + minimizer.evaluate('x', minimizer_parameters=minimizer_parameters) + + def test_prepare_parameters(self, minimizer: MinimizerBase): + # When + parameters = { + 'pa': 1, + 'pb': 2 + } + + minimizer._cached_pars = { + 'a': MagicMock(), + 'b': MagicMock(), + 'c': MagicMock() + } + minimizer._cached_pars['a'].raw_value = 3 + minimizer._cached_pars['b'].raw_value = 4 + minimizer._cached_pars['c'].raw_value = 5 + + # Then + parameters = minimizer._prepare_parameters(parameters) + + # Expect + assert parameters == { + 'pa': 1, + 'pb': 2, + 'pc': 5 + } \ No newline at end of file diff --git a/tests/unit_tests/Fitting/minimizers/test_minimizer_lmfit.py b/tests/unit_tests/Fitting/minimizers/test_minimizer_lmfit.py new file mode 100644 index 00000000..f91237d7 --- /dev/null +++ b/tests/unit_tests/Fitting/minimizers/test_minimizer_lmfit.py @@ -0,0 +1,382 @@ +import pytest + +from inspect import Parameter as InspectParameter +from inspect import Signature +from inspect import _empty +from unittest.mock import MagicMock + +import easyscience.fitting.minimizers.minimizer_lmfit + +from easyscience.fitting.minimizers.minimizer_lmfit import LMFit +from easyscience.fitting.minimizers.minimizer_lmfit import _wrap_to_lm_signature +from easyscience.Objects.new_variable import Parameter +from lmfit import Parameter as LMParameter +from easyscience.Objects.ObjectClasses import BaseObj +from easyscience.fitting.minimizers.utils import FitError + + +class TestLMFit(): + @pytest.fixture + def minimizer(self): + minimizer = LMFit( + obj='obj', + fit_function='fit_function', + method='least_squares' + ) + return 
minimizer + + def test_init_exception(self): + with pytest.raises(FitError): + LMFit( + obj='obj', + fit_function='fit_function', + method='method' + ) + + def test_make_model(self, minimizer: LMFit, monkeypatch): + # When + mock_lm_model = MagicMock() + mock_LMModel = MagicMock(return_value=mock_lm_model) + monkeypatch.setattr(easyscience.fitting.minimizers.minimizer_lmfit, "LMModel", mock_LMModel) + minimizer._generate_fit_function = MagicMock(return_value='model') + mock_parm_1 = MagicMock(LMParameter) + mock_parm_1.value = 1.0 + mock_parm_1.min = -10.0 + mock_parm_1.max = 10.0 + mock_parm_2 = MagicMock(LMParameter) + mock_parm_2.value = 2.0 + mock_parm_2.min = -20.0 + mock_parm_2.max = 20.0 + pars = {'key_1': mock_parm_1, 'key_2': mock_parm_2} + + # Then + model = minimizer.make_model(pars=pars) + + # Expect + minimizer._generate_fit_function.assert_called_once_with() + mock_LMModel.assert_called_once_with('model', independent_vars=['x'], param_names=['pkey_1', 'pkey_2']) + mock_lm_model.set_param_hint.assert_called_with('pkey_2', value=2.0, min=-20.0, max=20.0) + assert mock_lm_model.set_param_hint.call_count == 2 + assert model == mock_lm_model + + def test_make_model_no_pars(self, minimizer: LMFit, monkeypatch): + # When + mock_lm_model = MagicMock() + mock_LMModel = MagicMock(return_value=mock_lm_model) + monkeypatch.setattr(easyscience.fitting.minimizers.minimizer_lmfit, "LMModel", mock_LMModel) + minimizer._generate_fit_function = MagicMock(return_value='model') + mock_parm_1 = MagicMock(Parameter) + mock_parm_1.value = 1.0 + mock_parm_1.min = -10.0 + mock_parm_1.max = 10.0 + mock_parm_2 = MagicMock(Parameter) + mock_parm_2.value = 2.0 + mock_parm_2.min = -20.0 + mock_parm_2.max = 20.0 + minimizer._cached_pars = {'key_1': mock_parm_1, 'key_2': mock_parm_2} + + # Then + model = minimizer.make_model() + + # Expect + minimizer._generate_fit_function.assert_called_once_with() + mock_LMModel.assert_called_once_with('model', independent_vars=['x'], param_names=['pkey_1', 'pkey_2']) + mock_lm_model.set_param_hint.assert_called_with('pkey_2', value=2.0, min=-20.0, max=20.0) + assert mock_lm_model.set_param_hint.call_count == 2 + assert model == mock_lm_model + + def test_generate_fit_function_signatur(self, minimizer: LMFit, monkeypatch): + # When + mock_parm_1 = MagicMock(Parameter) + mock_parm_1.value = 1.0 + mock_parm_1.error = 0.1 + mock_parm_2 = MagicMock(Parameter) + mock_parm_2.value = 2.0 + mock_parm_2.error = 0.2 + mock_obj = MagicMock(BaseObj) + mock_obj.get_fit_parameters = MagicMock(return_value=[mock_parm_1, mock_parm_2]) + minimizer._object = mock_obj + + mock_wrap_to_lm_signature = MagicMock(return_value='signature') + monkeypatch.setattr(easyscience.fitting.minimizers.minimizer_lmfit, "_wrap_to_lm_signature", mock_wrap_to_lm_signature) + + minimizer._original_fit_function = MagicMock(return_value='fit_function_return') + + # Then + fit_function = minimizer._generate_fit_function() + + # Expect + assert fit_function.__signature__ == 'signature' + + def test_generate_fit_function_lm_fit_function(self, minimizer: LMFit, monkeypatch): + # When + mock_parm_1 = MagicMock(Parameter) + mock_parm_1.value = 1.0 + mock_parm_1.error = 0.1 + mock_parm_2 = MagicMock(Parameter) + mock_parm_2.value = 2.0 + mock_parm_2.error = 0.2 + mock_obj = MagicMock(BaseObj) + mock_obj.get_fit_parameters = MagicMock(return_value=[mock_parm_1, mock_parm_2]) + minimizer._object = mock_obj + + mock_wrap_to_lm_signature = MagicMock(return_value='signature') + 
monkeypatch.setattr(easyscience.fitting.minimizers.minimizer_lmfit, "_wrap_to_lm_signature", mock_wrap_to_lm_signature) + + minimizer._original_fit_function = MagicMock(return_value='fit_function_return') + + mock_constraint = MagicMock() + minimizer.fit_constraints = MagicMock(return_value=[mock_constraint]) + + fit_function = minimizer._generate_fit_function() + + # Then + result = fit_function(1) + + # Expect + assert result == 'fit_function_return' + mock_constraint.assert_called_once_with() + + def test_fit(self, minimizer: LMFit): + # When + from easyscience import global_object + global_object.stack.enabled = False + + mock_model = MagicMock() + mock_model.fit = MagicMock(return_value='fit') + minimizer.make_model = MagicMock(return_value=mock_model) + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + + # Then + result = minimizer.fit(x=1.0, y=2.0) + + # Expect + assert result == 'gen_fit_results' + mock_model.fit.assert_called_once_with(2.0, x=1.0, weights=0.7071067811865475, method='least_squares') + minimizer.make_model.assert_called_once_with() + minimizer._set_parameter_fit_result.assert_called_once_with('fit', False) + minimizer._gen_fit_results.assert_called_once_with('fit') + + def test_fit_model(self, minimizer: LMFit): + # When + mock_model = MagicMock() + mock_model.fit = MagicMock(return_value='fit') + minimizer.make_model = MagicMock(return_value=mock_model) + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + + # Then + minimizer.fit(x=1.0, y=2.0, model=mock_model) + + # Expect + mock_model.fit.assert_called_once_with(2.0, x=1.0, weights=0.7071067811865475, method='least_squares') + minimizer.make_model.assert_not_called() + + def test_fit_method(self, minimizer: LMFit): + # When + mock_model = MagicMock() + mock_model.fit = MagicMock(return_value='fit') + minimizer.make_model = MagicMock(return_value=mock_model) + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + minimizer.available_methods = MagicMock(return_value=['method_passed']) + + # Then + minimizer.fit(x=1.0, y=2.0, method='method_passed') + + # Expect + mock_model.fit.assert_called_once_with(2.0, x=1.0, weights=0.7071067811865475, method='method_passed') + minimizer.available_methods.assert_called_once_with() + + def test_fit_kwargs(self, minimizer: LMFit): + # When + mock_model = MagicMock() + mock_model.fit = MagicMock(return_value='fit') + minimizer.make_model = MagicMock(return_value=mock_model) + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + + # Then + minimizer.fit(x=1.0, y=2.0, minimizer_kwargs={'minimizer_key': 'minimizer_val'}, engine_kwargs={'engine_key': 'engine_val'}) + + # Expect + mock_model.fit.assert_called_once_with(2.0, x=1.0, weights=0.7071067811865475, method='least_squares', fit_kws={'minimizer_key': 'minimizer_val'}, engine_key='engine_val') + + def test_fit_exception(self, minimizer: LMFit): + # When + minimizer.make_model = MagicMock(side_effect=Exception('Exception')) + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + + # Then Expect + with pytest.raises(FitError): + minimizer.fit(x=1.0, y=2.0) + + def test_convert_to_pars_obj(self, minimizer: LMFit, monkeypatch): + # When + minimizer._object = MagicMock() +
minimizer._object.get_fit_parameters = MagicMock(return_value=['parm_1', 'parm_2']) + + minimizer.convert_to_par_object = MagicMock(return_value='convert_to_par_object') + + mock_lm_parameter = MagicMock() + mock_lm_parameter.add_many = MagicMock(return_value='add_many') + mock_LMParameters = MagicMock(return_value=mock_lm_parameter) + monkeypatch.setattr(easyscience.fitting.minimizers.minimizer_lmfit, "LMParameters", mock_LMParameters) + + # Then + pars = minimizer.convert_to_pars_obj() + + # Expect + assert pars == 'add_many' + assert minimizer.convert_to_par_object.call_count == 2 + minimizer._object.get_fit_parameters.assert_called_once_with() + minimizer.convert_to_par_object.assert_called_with('parm_2') + mock_lm_parameter.add_many.assert_called_once_with(['convert_to_par_object', 'convert_to_par_object']) + + def test_convert_to_pars_obj_with_parameters(self, minimizer: LMFit, monkeypatch): + # When + minimizer.convert_to_par_object = MagicMock(return_value='convert_to_par_object') + + mock_lm_parameter = MagicMock() + mock_lm_parameter.add_many = MagicMock(return_value='add_many') + mock_LMParameters = MagicMock(return_value=mock_lm_parameter) + monkeypatch.setattr(easyscience.fitting.minimizers.minimizer_lmfit, "LMParameters", mock_LMParameters) + + # Then + pars = minimizer.convert_to_pars_obj(['parm_1', 'parm_2']) + + # Expect + assert pars == 'add_many' + assert minimizer.convert_to_par_object.call_count == 2 + minimizer.convert_to_par_object.assert_called_with('parm_2') + mock_lm_parameter.add_many.assert_called_once_with(['convert_to_par_object', 'convert_to_par_object']) + + def test_convert_to_par_object(self, minimizer: LMFit, monkeypatch): + # When + mock_lm_parameter = MagicMock() + mock_LMParameter = MagicMock(return_value=mock_lm_parameter) + monkeypatch.setattr(easyscience.fitting.minimizers.minimizer_lmfit, "LMParameter", mock_LMParameter) + + mock_parm = MagicMock(Parameter) + mock_parm.value = 1.0 + mock_parm.fixed = True + mock_parm.min = -10.0 + mock_parm.max = 10.0 + mock_parm.unique_name = 'key_converted' + + # Then + par = minimizer.convert_to_par_object(mock_parm) + + # Expect + assert par == mock_lm_parameter + mock_LMParameter.assert_called_once_with('pkey_converted', value=1.0, vary=False, min=-10.0, max=10.0, expr=None, brute_step=None) + + def test_set_parameter_fit_result_no_stack_status(self, minimizer: LMFit): + # When + minimizer._cached_pars = { + 'a': MagicMock(), + 'b': MagicMock(), + } + minimizer._cached_pars['a'].value = 'a' + minimizer._cached_pars['b'].value = 'b' + + mock_param_a = MagicMock() + mock_param_a.value = 1.0 + mock_param_a.stderr = 0.1 + mock_param_b = MagicMock() + mock_param_b.value = 2.0 + mock_param_b.stderr = 0.2 + mock_fit_result = MagicMock() + mock_fit_result.params = {'pa': mock_param_a, 'pb': mock_param_b} + mock_fit_result.errorbars = True + + # Then + minimizer._set_parameter_fit_result(mock_fit_result, False) + + # Expect + assert minimizer._cached_pars['a'].value == 1.0 + assert minimizer._cached_pars['a'].error == 0.1 + assert minimizer._cached_pars['b'].value == 2.0 + assert minimizer._cached_pars['b'].error == 0.2 + + def test_set_parameter_fit_result_no_stack_status_no_error(self, minimizer: LMFit): + # When + minimizer._cached_pars = { + 'a': MagicMock(), + 'b': MagicMock(), + } + minimizer._cached_pars['a'].value = 'a' + minimizer._cached_pars['b'].value = 'b' + + mock_param_a = MagicMock() + mock_param_a.value = 1.0 + mock_param_a.stderr = 0.1 + mock_param_b = MagicMock() + mock_param_b.value = 2.0 +
mock_param_b.stderr = 0.2 + mock_fit_result = MagicMock() + mock_fit_result.params = {'pa': mock_param_a, 'pb': mock_param_b} + mock_fit_result.errorbars = False + + # Then + minimizer._set_parameter_fit_result(mock_fit_result, False) + + # Expect + assert minimizer._cached_pars['a'].value == 1.0 + assert minimizer._cached_pars['a'].error == 0.0 + assert minimizer._cached_pars['b'].value == 2.0 + assert minimizer._cached_pars['b'].error == 0.0 + + def test_gen_fit_results(self, minimizer: LMFit, monkeypatch): + # When + mock_domain_fit_results = MagicMock() + mock_FitResults = MagicMock(return_value=mock_domain_fit_results) + monkeypatch.setattr(easyscience.fitting.minimizers.minimizer_lmfit, "FitResults", mock_FitResults) + + mock_fit_result = MagicMock() + mock_fit_result.success ='success' + mock_fit_result.data = 'data' + mock_fit_result.userkws = {'x': 'x_val'} + mock_fit_result.values = 'values' + mock_fit_result.init_values = 'init_values' + mock_fit_result.best_fit = 'best_fit' + mock_fit_result.weights = 10 + + # Then + domain_fit_results = minimizer._gen_fit_results(mock_fit_result, **{'kwargs_set_key': 'kwargs_set_val'}) + + # Expect + assert domain_fit_results == mock_domain_fit_results + assert domain_fit_results.kwargs_set_key == 'kwargs_set_val' + assert domain_fit_results.success == 'success' + assert domain_fit_results.y_obs == 'data' + assert domain_fit_results.x == 'x_val' + assert domain_fit_results.p == 'values' + assert domain_fit_results.p0 == 'init_values' + assert domain_fit_results.y_calc == 'best_fit' + assert domain_fit_results.y_err == 0.1 + assert str(domain_fit_results.minimizer_engine) == "" + assert domain_fit_results.fit_args is None + +def test_wrap_to_lm_signature(): + # When + mock_parm_1 = MagicMock(Parameter) + mock_parm_1.value = 1.0 + mock_parm_2 = MagicMock(Parameter) + mock_parm_2.value = 2.0 + pars = {1: mock_parm_1, 2: mock_parm_2} + + + # Then + signature = _wrap_to_lm_signature(pars) + + # Expect + wrapped_parameters = [ + InspectParameter('x', InspectParameter.POSITIONAL_OR_KEYWORD, annotation=_empty), + InspectParameter('p1', InspectParameter.POSITIONAL_OR_KEYWORD, annotation=_empty, default=1.0), + InspectParameter('p2', InspectParameter.POSITIONAL_OR_KEYWORD, annotation=_empty, default=2.0) + ] + expected_signature = Signature(wrapped_parameters) + assert signature == expected_signature
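
Reviewer sketch (not part of the patch): the hunks above standardise the 'p<unique_name>' prefix through MINIMIZER_PARAMETER_PREFIX and centralise the fill-in of missing parameter values in MinimizerBase._prepare_parameters, with _wrap_to_lm_signature faking the keyword signature lmfit expects. The standalone snippet below illustrates both conventions without importing easyscience; the helper names wrap_signature and prepare_parameters are hypothetical stand-ins for _wrap_to_lm_signature and _prepare_parameters, assuming plain float defaults in place of Parameter objects.

# Illustrative sketch only; mimics the 'p<name>' convention, stdlib only.
import inspect

MINIMIZER_PARAMETER_PREFIX = 'p'


def wrap_signature(parameter_defaults):
    """Build an lmfit-style signature f(x, p<name>=default, ...) from a dict of defaults."""
    params = [inspect.Parameter('x', inspect.Parameter.POSITIONAL_OR_KEYWORD)]
    for name, default in parameter_defaults.items():
        params.append(
            inspect.Parameter(
                MINIMIZER_PARAMETER_PREFIX + str(name),
                inspect.Parameter.POSITIONAL_OR_KEYWORD,
                default=default,
            )
        )
    return inspect.Signature(params)


def prepare_parameters(user_parameters, cached_defaults):
    """Fill in any 'p<name>' key the caller did not supply, analogous to _prepare_parameters."""
    prepared = dict(user_parameters)
    for name, default in cached_defaults.items():
        prepared.setdefault(MINIMIZER_PARAMETER_PREFIX + str(name), default)
    return prepared


if __name__ == '__main__':
    defaults = {'amplitude': 1.0, 'width': 0.5}
    print(wrap_signature(defaults))                        # (x, pamplitude=1.0, pwidth=0.5)
    print(prepare_parameters({'pwidth': 0.7}, defaults))   # {'pwidth': 0.7, 'pamplitude': 1.0}

Running the sketch prints the faked signature and the completed parameter dict, which is the shape of the keyword arguments the lmfit, bumps and DFO-LS wrappers above pass to the wrapped fit function.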