Commit

Merge branch 'master' into cost_func_options_write
jess-farmer committed May 7, 2024
2 parents 567d674 + 76eb8d7 commit 03577fc
Showing 7 changed files with 150 additions and 171 deletions.
45 changes: 22 additions & 23 deletions fitbenchmarking/controllers/bumps_controller.py
@@ -2,11 +2,10 @@
Implements a controller for the Bumps fitting software.
"""

+import numpy as np
from bumps.fitters import fit as bumpsFit
from bumps.names import Curve, FitProblem, PoissonCurve

-import numpy as np
-
from fitbenchmarking.controllers.base_controller import Controller
from fitbenchmarking.cost_func.cost_func_factory import create_cost_func
from fitbenchmarking.utils.exceptions import MaxRuntimeError
@@ -21,24 +20,24 @@ class BumpsController(Controller):
"""

algorithm_check = {
-            'all': ['amoeba',
-                    'lm-bumps',
-                    'newton',
-                    'de',
-                    'scipy-leastsq',
-                    'dream'],
-            'ls': ['lm-bumps', 'scipy-leastsq'],
-            'deriv_free': ['amoeba', 'de'],
-            'general': ['amoeba', 'newton', 'de'],
-            'simplex': ['amoeba'],
-            'trust_region': ['lm-bumps', 'scipy-leastsq'],
-            'levenberg-marquardt': ['lm-bumps', 'scipy-leastsq'],
-            'gauss_newton': [],
-            'bfgs': ['newton'],
-            'conjugate_gradient': [],
-            'steepest_descent': [],
-            'global_optimization': ['de'],
-            'MCMC': ['dream']}
+        'all': ['amoeba',
+                'lm-bumps',
+                'newton',
+                'de',
+                'scipy-leastsq',
+                'dream'],
+        'ls': ['lm-bumps', 'scipy-leastsq'],
+        'deriv_free': ['amoeba', 'de'],
+        'general': ['amoeba', 'newton', 'de'],
+        'simplex': ['amoeba'],
+        'trust_region': ['lm-bumps', 'scipy-leastsq'],
+        'levenberg-marquardt': ['lm-bumps', 'scipy-leastsq'],
+        'gauss_newton': [],
+        'bfgs': ['newton'],
+        'conjugate_gradient': [],
+        'steepest_descent': [],
+        'global_optimization': ['de'],
+        'MCMC': ['dream']}

def __init__(self, cost_func):
"""
@@ -49,9 +48,9 @@ def __init__(self, cost_func):
:class:`~fitbenchmarking.cost_func.base_cost_func.CostFunc`
"""
        super().__init__(cost_func)
-
-        self._param_names = [name.replace('.', '_')
-                             for name in self.problem.param_names]
+        # Need unique strings that are valid python vars
+        self._param_names = [
+            f'p{i}' for (i, _) in enumerate(self.problem.param_names)]
self.support_for_bounds = True
self._func_wrapper = None
self._fit_problem = None
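Note on the change above: Bumps builds Python variables from parameter names, so the controller now generates names that are guaranteed to be valid identifiers instead of only replacing '.' with '_'. A minimal standalone illustration (the sample names are taken from the new test further down; nothing here is part of the diff):

    # Old scheme: only '.' was replaced, so names like 'b@2' or 'b-3' stayed invalid.
    # New scheme: positional names 'p0', 'p1', ... are always valid identifiers.
    param_names = ['b.1', 'b@2', 'b-3', 'b_4']

    old_names = [name.replace('.', '_') for name in param_names]
    new_names = [f'p{i}' for (i, _) in enumerate(param_names)]

    print(old_names)                              # ['b_1', 'b@2', 'b-3', 'b_4']
    print(new_names)                              # ['p0', 'p1', 'p2', 'p3']
    print([n.isidentifier() for n in old_names])  # [True, False, False, True]
    print([n.isidentifier() for n in new_names])  # [True, True, True, True]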
42 changes: 29 additions & 13 deletions fitbenchmarking/controllers/lmfit_controller.py
@@ -3,6 +3,7 @@
"""
import numpy as np
from lmfit import Minimizer, Parameters
+
from fitbenchmarking.controllers.base_controller import Controller
from fitbenchmarking.utils.exceptions import MissingBoundsError

@@ -38,7 +39,6 @@ class LmfitController(Controller):
'leastsq'],
'deriv_free': ['powell',
'cobyla',
-                       'emcee',
'nelder',
'differential_evolution'],
'general': ['nelder',
@@ -72,8 +72,8 @@ class LmfitController(Controller):
'ampgo',
'shgo',
'dual_annealing'],
-        'MCMC': []
-    }
+        'MCMC': ['emcee']
+    }

jacobian_enabled_solvers = ['cg',
'bfgs',
@@ -105,21 +105,32 @@ def __init__(self, cost_func):
self.bound_minimizers = ['dual_annealing', 'differential_evolution']
self.lmfit_out = None
self.lmfit_params = Parameters()
+        self._param_names = [
+            f'p{i}' for (i, _) in enumerate(self.problem.param_names)]

def lmfit_resdiuals(self, params):
"""
lmfit resdiuals
"""
        return self.cost_func.eval_r(list(map(lambda name: params[name].value,
-                                              self.problem.param_names)))
+                                              self._param_names)))

+    def lmfit_loglike(self, params):
+        """
+        lmfit resdiuals
+        """
+        return self.cost_func.eval_loglike(
+            list(map(lambda name: params[name].value,
+                     self.problem.param_names))
+        )

def lmfit_jacobians(self, params):
"""
lmfit jacobians
"""
        return self.cost_func.jac_cost(list(map(lambda name:
                                                params[name].value,
-                                                self.problem.param_names)))
+                                                self._param_names)))

def setup(self):
"""
@@ -129,10 +140,10 @@ def setup(self):
if (self.value_ranges is None or np.any(np.isinf(self.value_ranges))) \
and self.minimizer in self.bound_minimizers:
            raise MissingBoundsError(
-                    f"{self.minimizer} requires finite bounds on all"
-                    " parameters")
+                f"{self.minimizer} requires finite bounds on all"
+                " parameters")

-        for i, name in enumerate(self.problem.param_names):
+        for i, name in enumerate(self._param_names):
kwargs = {"name": name,
"value": self.initial_params[i]}
if self.value_ranges is not None:
@@ -146,17 +157,19 @@ def fit(self):
"""
        Run problem with lmfit
        """
-
-        minner = Minimizer(self.lmfit_resdiuals, self.lmfit_params)
-
kwargs = {"method": self.minimizer}
+        if self.minimizer == "emcee":
+            kwargs["progress"] = False
+            kwargs["burn"] = 300
+            minner = Minimizer(self.lmfit_loglike, self.lmfit_params)
+        else:
+            minner = Minimizer(self.lmfit_resdiuals, self.lmfit_params)
+
if self.minimizer in self.jacobian_enabled_solvers:
kwargs["Dfun"] = self.lmfit_jacobians
if self.cost_func.hessian and \
self.minimizer in self.hessian_enabled_solvers:
kwargs["hess"] = self.cost_func.hes_cost
-        if self.minimizer == "emcee":
-            kwargs["progress"] = False
self.lmfit_out = minner.minimize(**kwargs)

def cleanup(self):
@@ -170,5 +183,8 @@ def cleanup(self):
else:
self.flag = 2

+        if self.minimizer == 'emcee':
+            self.params_pdfs = self.lmfit_out.flatchain.to_dict(orient='list')
+
self.final_params = list(map(lambda params: params.value,
self.lmfit_out.params.values()))
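Note on the change above: when the minimizer is emcee, the controller now samples the cost function's log-likelihood (lmfit_loglike) rather than minimising the residual vector, and cleanup() exposes the sampler's flattened chain as per-parameter PDFs. A hedged, self-contained sketch of the same lmfit pattern — the straight-line data are invented, lmfit's default residual-based log-probability stands in for the controller's eval_loglike, and the emcee package must be installed:

    import numpy as np
    from lmfit import Minimizer, Parameters

    # Invented straight-line data with Gaussian noise.
    rng = np.random.default_rng(0)
    x = np.linspace(0, 10, 50)
    y = 2.5 * x + 1.0 + rng.normal(scale=0.5, size=x.size)

    params = Parameters()
    params.add('p0', value=1.0)   # slope
    params.add('p1', value=0.0)   # intercept

    def residuals(params):
        # lmfit's emcee method turns a residual array into a log-probability;
        # the controller wires in its own log-likelihood instead.
        return y - (params['p0'].value * x + params['p1'].value)

    minner = Minimizer(residuals, params)
    out = minner.minimize(method='emcee', burn=300, progress=False)

    # Mirrors cleanup(): the flattened chain gives a PDF sample per parameter.
    pdfs = out.flatchain.to_dict(orient='list')
    print({name: len(samples) for name, samples in pdfs.items()})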
58 changes: 34 additions & 24 deletions fitbenchmarking/controllers/mantid_controller.py
@@ -2,9 +2,9 @@
Implements a controller for the Mantid fitting software.
"""

+import numpy as np
from mantid import simpleapi as msapi
from mantid.fitfunctions import FunctionFactory, IFunction1D
-import numpy as np

from fitbenchmarking.controllers.base_controller import Controller
from fitbenchmarking.cost_func.cost_func_factory import create_cost_func
@@ -25,32 +25,33 @@ class MantidController(Controller):
'nlls': 'Unweighted least squares',
'weighted_nlls': 'Least squares',
'poisson': 'Poisson',
+        'loglike_nlls': 'Least squares'
}

algorithm_check = {
-            'all': ['BFGS', 'Conjugate gradient (Fletcher-Reeves imp.)',
-                    'Conjugate gradient (Polak-Ribiere imp.)',
-                    'Damped GaussNewton', 'Levenberg-Marquardt',
-                    'Levenberg-MarquardtMD', 'Simplex', 'SteepestDescent',
-                    'Trust Region', 'FABADA'],
-            'ls': ['Levenberg-Marquardt', 'Levenberg-MarquardtMD',
-                   'Trust Region'],
-            'deriv_free': ['Simplex'],
-            'general': ['BFGS', 'Conjugate gradient (Fletcher-Reeves imp.)',
-                        'Conjugate gradient (Polak-Ribiere imp.)',
-                        'Damped GaussNewton', 'Simplex', 'SteepestDescent'],
-            'simplex': ['Simplex'],
-            'trust_region': ['Trust Region', 'Levenberg-Marquardt',
-                             'Levenberg-MarquardtMD'],
-            'levenberg-marquardt': ['Levenberg-Marquardt',
-                                    'Levenberg-MarquardtMD'],
-            'gauss_newton': ['Damped GaussNewton'],
-            'bfgs': ['BFGS'],
-            'conjugate_gradient': ['Conjugate gradient (Fletcher-Reeves imp.)',
-                                   'Conjugate gradient (Polak-Ribiere imp.)'],
-            'steepest_descent': ['SteepestDescent'],
-            'global_optimization': ['FABADA'],
-            'MCMC': []}
+        'all': ['BFGS', 'Conjugate gradient (Fletcher-Reeves imp.)',
+                'Conjugate gradient (Polak-Ribiere imp.)',
+                'Damped GaussNewton', 'Levenberg-Marquardt',
+                'Levenberg-MarquardtMD', 'Simplex', 'SteepestDescent',
+                'Trust Region', 'FABADA'],
+        'ls': ['Levenberg-Marquardt', 'Levenberg-MarquardtMD',
+               'Trust Region', 'FABADA'],
+        'deriv_free': ['Simplex', 'FABADA'],
+        'general': ['BFGS', 'Conjugate gradient (Fletcher-Reeves imp.)',
+                    'Conjugate gradient (Polak-Ribiere imp.)',
+                    'Damped GaussNewton', 'Simplex', 'SteepestDescent'],
+        'simplex': ['Simplex'],
+        'trust_region': ['Trust Region', 'Levenberg-Marquardt',
+                         'Levenberg-MarquardtMD'],
+        'levenberg-marquardt': ['Levenberg-Marquardt',
+                                'Levenberg-MarquardtMD'],
+        'gauss_newton': ['Damped GaussNewton'],
+        'bfgs': ['BFGS'],
+        'conjugate_gradient': ['Conjugate gradient (Fletcher-Reeves imp.)',
+                               'Conjugate gradient (Polak-Ribiere imp.)'],
+        'steepest_descent': ['SteepestDescent'],
+        'global_optimization': [],
+        'MCMC': ['FABADA']}

jacobian_enabled_solvers = ['BFGS',
'Conjugate gradient (Fletcher-Reeves imp.)',
@@ -240,7 +241,8 @@ def fit(self):
# to work; setting to the value in the mantid docs
            minimizer_str += (",Chain Length=100000"
                              ",Steps between values=10"
-                             ",Convergence Criteria=0.01")
+                             ",Convergence Criteria=0.01"
+                             ",PDF=1,ConvergedChain=chain")
self._added_args['MaxIterations'] = 2000000

fit_result = msapi.Fit(Function=self._mantid_function,
@@ -269,6 +271,14 @@ def cleanup(self):
self._mantid_results.OutputParameters.column(0),
self._mantid_results.OutputParameters.column(1)))

+        if self.minimizer == 'FABADA':
+            self.params_pdfs = {}
+            n_chains = \
+                self._mantid_results.ConvergedChain.getNumberHistograms()
+            for i in range(0, n_chains-1):
+                self.params_pdfs[self._param_names[i]] = \
+                    self._mantid_results.ConvergedChain.readY(i).tolist()
+
if not self._multi_fit:
self.final_params = [final_params_dict[key]
for key in self._param_names]
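Note on the change above: requesting PDF=1,ConvergedChain=chain makes Mantid's FABADA minimizer output its converged MCMC chains as a workspace, which cleanup() converts into one list of samples per parameter. The loop's n_chains-1 bound skips the last histogram, which (as I read the FABADA output) is not a parameter chain. A self-contained sketch with a stand-in workspace, since the real thing needs a live Mantid session — FakeWorkspace only mimics the two MatrixWorkspace methods used above:

    import numpy as np

    class FakeWorkspace:
        """Stand-in for the Mantid MatrixWorkspace holding the chains."""
        def __init__(self, chains):
            self._chains = chains

        def getNumberHistograms(self):
            return len(self._chains)

        def readY(self, i):
            return self._chains[i]

    rng = np.random.default_rng(0)
    param_names = ['p0', 'p1']
    # Two parameter chains plus one trailing non-parameter histogram.
    converged_chain = FakeWorkspace([rng.random(1000) for _ in range(3)])

    params_pdfs = {}
    n_chains = converged_chain.getNumberHistograms()
    for i in range(0, n_chains - 1):   # skip the final histogram, as above
        params_pdfs[param_names[i]] = converged_chain.readY(i).tolist()

    print({name: len(chain) for name, chain in params_pdfs.items()})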
10 changes: 10 additions & 0 deletions fitbenchmarking/controllers/tests/test_controllers.py
@@ -523,6 +523,16 @@ def test_lmfit(self):
controller.lmfit_out.success = False
self.shared_tests.check_diverged(controller)

+    def test_variable_names_corrected_in_controllers(self):
+        """
+        Test if variable names are corrected properly
+        within the LmfitController and BumpsController
+        """
+        for control in ([LmfitController, BumpsController]):
+            self.cost_func.param_names = ['b.1', 'b@2', 'b-3', 'b_4']
+            controller = control(self.cost_func)
+            assert controller._param_names == ['p0', 'p1', 'p2', 'p3']
+

@run_for_test_types(TEST_TYPE, 'all')
class ControllerBoundsTests(TestCase):
4 changes: 2 additions & 2 deletions fitbenchmarking/utils/fitbm_result.py
@@ -356,7 +356,7 @@ def norm_acc(self, value):
"""
Stores the normalised accuracy and updates the value
-        :param value: New value for norm_runtime
+        :param value: New value for norm_accuracy
:type value: float
"""
self._norm_acc = value
@@ -373,7 +373,7 @@
if self.min_runtime in [np.nan, np.inf]:
self._norm_runtime = np.inf
else:
-            self._norm_runtime = self.mean_runtime / self.min_runtime
+            self._norm_runtime = self.runtime / self.min_runtime
return self._norm_runtime

@norm_runtime.setter
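Note on the fix above: the normalised runtime is now this result's own runtime divided by the fastest runtime recorded for the problem, so the quickest minimizer scores exactly 1.0. A toy check with invented numbers:

    runtimes = {'minimizer_a': 0.12, 'minimizer_b': 0.30}   # seconds, illustrative
    min_runtime = min(runtimes.values())

    norm_runtime = {name: t / min_runtime for name, t in runtimes.items()}
    print(norm_runtime)   # {'minimizer_a': 1.0, 'minimizer_b': 2.5}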
