Skip to content

Commit

Permalink
Added emcee and FABADA as Bayesian minimizers (#1272)
Browse files Browse the repository at this point in the history
* add emcee as mcmc minimizer

* add FABADA to mcmc methods

* linting fixes

* Use default number of steps for emcee

---------

Co-authored-by: Tyrone Rees <tyrone.rees@stfc.ac.uk>
  • Loading branch information
jess-farmer and tyronerees committed Apr 29, 2024
1 parent 138dfa5 commit 6987aec
Show file tree
Hide file tree
Showing 2 changed files with 58 additions and 34 deletions.
34 changes: 24 additions & 10 deletions fitbenchmarking/controllers/lmfit_controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
"""
import numpy as np
from lmfit import Minimizer, Parameters

from fitbenchmarking.controllers.base_controller import Controller
from fitbenchmarking.utils.exceptions import MissingBoundsError

Expand Down Expand Up @@ -38,7 +39,6 @@ class LmfitController(Controller):
'leastsq'],
'deriv_free': ['powell',
'cobyla',
'emcee',
'nelder',
'differential_evolution'],
'general': ['nelder',
Expand Down Expand Up @@ -72,8 +72,8 @@ class LmfitController(Controller):
'ampgo',
'shgo',
'dual_annealing'],
'MCMC': []
}
'MCMC': ['emcee']
}

jacobian_enabled_solvers = ['cg',
'bfgs',
Expand Down Expand Up @@ -115,6 +115,15 @@ def lmfit_resdiuals(self, params):
return self.cost_func.eval_r(list(map(lambda name: params[name].value,
self._param_names)))

def lmfit_loglike(self, params):
    """
    Evaluate the cost function's log-likelihood for the current lmfit
    parameter values (used as the objective for the emcee MCMC
    minimizer).

    :param params: lmfit ``Parameters`` object, keyed by the names
                   stored in ``self._param_names``
    :return: log-likelihood value from ``self.cost_func.eval_loglike``
    """
    # Look the values up via self._param_names: setup() builds the
    # lmfit Parameters from these names (see the enumerate loop over
    # self._param_names), and the sibling lmfit_resdiuals does the
    # same. Indexing with self.problem.param_names would raise a
    # KeyError whenever the two name lists differ.
    return self.cost_func.eval_loglike(
        [params[name].value for name in self._param_names]
    )

def lmfit_jacobians(self, params):
"""
lmfit jacobians
Expand All @@ -131,8 +140,8 @@ def setup(self):
if (self.value_ranges is None or np.any(np.isinf(self.value_ranges))) \
and self.minimizer in self.bound_minimizers:
raise MissingBoundsError(
f"{self.minimizer} requires finite bounds on all"
" parameters")
f"{self.minimizer} requires finite bounds on all"
" parameters")

for i, name in enumerate(self._param_names):
kwargs = {"name": name,
Expand All @@ -148,17 +157,19 @@ def fit(self):
"""
Run problem with lmfit
"""

minner = Minimizer(self.lmfit_resdiuals, self.lmfit_params)

kwargs = {"method": self.minimizer}
if self.minimizer == "emcee":
kwargs["progress"] = False
kwargs["burn"] = 300
minner = Minimizer(self.lmfit_loglike, self.lmfit_params)
else:
minner = Minimizer(self.lmfit_resdiuals, self.lmfit_params)

if self.minimizer in self.jacobian_enabled_solvers:
kwargs["Dfun"] = self.lmfit_jacobians
if self.cost_func.hessian and \
self.minimizer in self.hessian_enabled_solvers:
kwargs["hess"] = self.cost_func.hes_cost
if self.minimizer == "emcee":
kwargs["progress"] = False
self.lmfit_out = minner.minimize(**kwargs)

def cleanup(self):
Expand All @@ -172,5 +183,8 @@ def cleanup(self):
else:
self.flag = 2

if self.minimizer == 'emcee':
self.params_pdfs = self.lmfit_out.flatchain.to_dict(orient='list')

self.final_params = list(map(lambda params: params.value,
self.lmfit_out.params.values()))
58 changes: 34 additions & 24 deletions fitbenchmarking/controllers/mantid_controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@
Implements a controller for the Mantid fitting software.
"""

import numpy as np
from mantid import simpleapi as msapi
from mantid.fitfunctions import FunctionFactory, IFunction1D
import numpy as np

from fitbenchmarking.controllers.base_controller import Controller
from fitbenchmarking.cost_func.cost_func_factory import create_cost_func
Expand All @@ -25,32 +25,33 @@ class MantidController(Controller):
'nlls': 'Unweighted least squares',
'weighted_nlls': 'Least squares',
'poisson': 'Poisson',
'loglike_nlls': 'Least squares'
}

algorithm_check = {
'all': ['BFGS', 'Conjugate gradient (Fletcher-Reeves imp.)',
'all': ['BFGS', 'Conjugate gradient (Fletcher-Reeves imp.)',
'Conjugate gradient (Polak-Ribiere imp.)',
'Damped GaussNewton', 'Levenberg-Marquardt',
'Levenberg-MarquardtMD', 'Simplex', 'SteepestDescent',
'Trust Region', 'FABADA'],
'ls': ['Levenberg-Marquardt', 'Levenberg-MarquardtMD',
'Trust Region'],
'deriv_free': ['Simplex'],
'general': ['BFGS', 'Conjugate gradient (Fletcher-Reeves imp.)',
'Conjugate gradient (Polak-Ribiere imp.)',
'Damped GaussNewton', 'Levenberg-Marquardt',
'Levenberg-MarquardtMD', 'Simplex', 'SteepestDescent',
'Trust Region', 'FABADA'],
'ls': ['Levenberg-Marquardt', 'Levenberg-MarquardtMD',
'Trust Region', 'FABADA'],
'deriv_free': ['Simplex', 'FABADA'],
'general': ['BFGS', 'Conjugate gradient (Fletcher-Reeves imp.)',
'Conjugate gradient (Polak-Ribiere imp.)',
'Damped GaussNewton', 'Simplex', 'SteepestDescent'],
'simplex': ['Simplex'],
'trust_region': ['Trust Region', 'Levenberg-Marquardt',
'Levenberg-MarquardtMD'],
'levenberg-marquardt': ['Levenberg-Marquardt',
'Levenberg-MarquardtMD'],
'gauss_newton': ['Damped GaussNewton'],
'bfgs': ['BFGS'],
'conjugate_gradient': ['Conjugate gradient (Fletcher-Reeves imp.)',
'Conjugate gradient (Polak-Ribiere imp.)'],
'steepest_descent': ['SteepestDescent'],
'global_optimization': ['FABADA'],
'MCMC': []}
'Damped GaussNewton', 'Simplex', 'SteepestDescent'],
'simplex': ['Simplex'],
'trust_region': ['Trust Region', 'Levenberg-Marquardt',
'Levenberg-MarquardtMD'],
'levenberg-marquardt': ['Levenberg-Marquardt',
'Levenberg-MarquardtMD'],
'gauss_newton': ['Damped GaussNewton'],
'bfgs': ['BFGS'],
'conjugate_gradient': ['Conjugate gradient (Fletcher-Reeves imp.)',
'Conjugate gradient (Polak-Ribiere imp.)'],
'steepest_descent': ['SteepestDescent'],
'global_optimization': [],
'MCMC': ['FABADA']}

jacobian_enabled_solvers = ['BFGS',
'Conjugate gradient (Fletcher-Reeves imp.)',
Expand Down Expand Up @@ -240,7 +241,8 @@ def fit(self):
# to work; setting to the value in the mantid docs
minimizer_str += (",Chain Length=100000"
",Steps between values=10"
",Convergence Criteria=0.01")
",Convergence Criteria=0.01"
",PDF=1,ConvergedChain=chain")
self._added_args['MaxIterations'] = 2000000

fit_result = msapi.Fit(Function=self._mantid_function,
Expand Down Expand Up @@ -269,6 +271,14 @@ def cleanup(self):
self._mantid_results.OutputParameters.column(0),
self._mantid_results.OutputParameters.column(1)))

if self.minimizer == 'FABADA':
self.params_pdfs = {}
n_chains = \
self._mantid_results.ConvergedChain.getNumberHistograms()
for i in range(0, n_chains-1):
self.params_pdfs[self._param_names[i]] = \
self._mantid_results.ConvergedChain.readY(i).tolist()

if not self._multi_fit:
self.final_params = [final_params_dict[key]
for key in self._param_names]
Expand Down

0 comments on commit 6987aec

Please sign in to comment.