# scipy/scipy

Merge pull request #127 from dlaxalde/enh/optimize/ncg-hess

ENH: optimize/minimize: combine hess and hessp parameters
Commit c9c2d66701583984589c88c08c478e2fc6d9f4ec, committed by teoliphant (2 parents: e5185f1 + c5f12ac).
Showing 143 additions and 38 deletions across 3 changed files:
1. +9 −11 scipy/optimize/minimize.py
2. +28 −5 scipy/optimize/optimize.py
3. +106 −22 scipy/optimize/tests/test_optimize.py
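
Before the per-file diffs, it may help to see the user-facing effect of the change. Below is a minimal before/after sketch assuming this PR's `minimize` interface (later SciPy releases reinstated a separate `hessp` keyword, so this reflects the API only as of this commit); `rosen`, `rosen_der`, `rosen_hess`, and `rosen_hess_prod` are SciPy's stock Rosenbrock helpers.

```python
import numpy as np
from scipy.optimize import minimize, rosen, rosen_der, rosen_hess, rosen_hess_prod

x0 = np.array([1.3, 0.7, 0.8, 1.9, 1.2])

# Before this change, the Hessian-vector-product callable had its own keyword:
#   minimize(rosen, x0, method='Newton-CG', jac=rosen_der, hessp=rosen_hess_prod)

# After this change, a single `hess` keyword accepts either form; Newton-CG
# tells them apart by the number of arguments the callable takes.
minimize(rosen, x0, method='Newton-CG', jac=rosen_der, hess=rosen_hess)       # full Hessian matrix
minimize(rosen, x0, method='Newton-CG', jac=rosen_der, hess=rosen_hess_prod)  # Hessian times a vector
```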
scipy/optimize/minimize.py (20 changed lines)
@@ -19,7 +19,7 @@ def minimize(fun, x0, args=(), method='Nelder-Mead', jac=None, hess=None,
-             hessp=None, options=dict(), full_output=False, callback=None,
+             options=dict(), full_output=False, callback=None,
              retall=False):
     """
     Minimization of scalar function of one or more variables.
@@ -39,15 +39,13 @@ def minimize(fun, x0, args=(), method='Nelder-Mead', jac=None, hess=None,
     jac : callable, optional
         Jacobian of objective function (if None, Jacobian will be
         estimated numerically). Only for CG, BFGS, Newton-CG.
-    hess, hessp : callable, optional
-        Hessian of objective function or Hessian of objective function
-        times an arbitrary vector p. Only for Newton-CG.
-        Only one of `hessp` or `hess` needs to be given. If `hess` is
-        provided, then `hessp` will be ignored. If neither `hess` nor
-        `hessp` is provided, then the hessian product will be approximated
-        using finite differences on `jac`. `hessp` must compute the hessian
-        times an arbitrary vector. If it is not given, finite-differences
-        on `jac` are used to compute it.
+    hess : callable, optional
+        Hessian of objective function. Only for Newton-CG.
+        The function `hess` can either return the Hessian matrix of `fun`
+        or the Hessian matrix times an arbitrary vector, in which case
+        it accepts an extra argument `p` as `hess(x, p, *args)`.
+        If `hess` is None, the Hessian will be approximated using finite
+        differences on `jac`.
     options : dict, optional
         A dictionary of solver options. All methods accept the following
         generic options:
@@ -203,7 +201,7 @@ def minimize(fun, x0, args=(), method='Nelder-Mead', jac=None, hess=None,
         return _minimize_bfgs(fun, x0, args, jac, options, full_output,
                               retall, callback)
     elif method.lower() == 'newton-cg':
-        return _minimize_newtoncg(fun, x0, args, jac, hess, hessp, options,
+        return _minimize_newtoncg(fun, x0, args, jac, hess, options,
                                   full_output, retall, callback)
     elif method.lower() == 'anneal':
         if callback:
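
The new docstring's `hess(x, p, *args)` convention means that a caller who can apply the Hessian to a vector without ever forming the matrix simply writes a two-argument callable. Here is a small sketch under that convention, with a made-up quadratic objective (none of these names come from the diff):

```python
import numpy as np
from scipy.optimize import minimize

# f(x) = 0.5 x^T A x - b^T x, so grad f = A x - b and the Hessian is the constant A.
A = np.array([[3.0, 1.0],
              [1.0, 2.0]])
b = np.array([1.0, 1.0])

def f(x):
    return 0.5 * x.dot(A).dot(x) - b.dot(x)

def grad(x):
    return A.dot(x) - b

def hess_matrix(x):        # same arity as f -> treated as the full Hessian
    return A

def hess_product(x, p):    # one extra argument -> treated as a Hessian-vector product
    return A.dot(p)

x0 = np.zeros(2)
minimize(f, x0, method='Newton-CG', jac=grad, hess=hess_matrix)
minimize(f, x0, method='Newton-CG', jac=grad, hess=hess_product)
```

For large problems the product form is the interesting one: Newton-CG only needs Hessian-vector products inside its inner conjugate-gradient loop, so the full matrix never has to be built.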
scipy/optimize/optimize.py (33 changed lines)
@@ -28,6 +28,7 @@
 from linesearch import \
      line_search_BFGS, line_search_wolfe1, line_search_wolfe2, \
      line_search_wolfe2 as line_search
+import inspect

 # standard status messages of optimizers
@@ -1148,13 +1149,20 @@ def fmin_ncg(f, x0, fprime, fhess_p=None, fhess=None, args=(), avextol=1e-5,
             'maxiter': maxiter,
             'disp': disp}

+    if fhess is not None:
+        hess = fhess
+    elif fhess_p is not None:
+        hess = fhess_p
+    else:
+        hess = None
+
     # force full_output if retall=True to preserve backwards compatibility
     if retall and not full_output:
-        out = _minimize_newtoncg(f, x0, args, fprime, fhess, fhess_p, opts,
+        out = _minimize_newtoncg(f, x0, args, fprime, hess, opts,
                                  full_output=True, retall=retall,
                                  callback=callback)
     else:
-        out = _minimize_newtoncg(f, x0, args, fprime, fhess, fhess_p, opts,
+        out = _minimize_newtoncg(f, x0, args, fprime, hess, opts,
                                  full_output, retall, callback)

     if full_output:
@@ -1171,7 +1179,7 @@ def fmin_ncg(f, x0, fprime, fhess_p=None, fhess=None, args=(), avextol=1e-5,
     else:
         return out

-def _minimize_newtoncg(fun, x0, args=(), jac=None, hess=None, hessp=None,
+def _minimize_newtoncg(fun, x0, args=(), jac=None, hess=None,
                        options={}, full_output=0, retall=0, callback=None):
     """
     Minimization of scalar function of one or more variables using the
@@ -1197,8 +1205,23 @@ def _minimize_newtoncg(fun, x0, args=(), jac=None, hess=None, hessp=None,
         raise ValueError('Jacobian is required for Newton-CG method')
     f = fun
     fprime = jac
-    fhess_p = hessp
-    fhess = hess
+    if hess is None:
+        fhess = None
+        fhess_p = None
+    else:
+        # check hessian type based on the number of arguments
+        fun_args = inspect.getargspec(fun)[0]
+        hess_args = inspect.getargspec(hess)[0]
+        if len(hess_args) == len(fun_args):
+            fhess = hess
+            fhess_p = None
+        elif len(hess_args) == len(fun_args) + 1:
+            fhess = None
+            fhess_p = hess
+        else:
+            raise ValueError('The number of arguments of the Hessian '
+                             'function does not agree with that of the '
+                             'objective function.')
     # retrieve useful options
     avextol = options.get('xtol', 1e-5)
     epsilon = options.get('eps', _epsilon)
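
The dispatch in `_minimize_newtoncg` keys purely on arity: a `hess` with the same number of positional arguments as `fun` is used as the full Hessian (`fhess`), one with exactly one extra argument becomes the Hessian-vector product (`fhess_p`), and anything else is rejected. Below is a standalone sketch of that check, written with `inspect.signature` because `inspect.getargspec` (used in the diff, and standard at the time) has since been removed from Python; the helper name is made up for illustration.

```python
import inspect
import numpy as np

def classify_hessian(fun, hess):
    """Mirror the arity check from the diff: 'matrix' when `hess` takes the
    same arguments as `fun`, 'product' when it takes one extra argument `p`."""
    n_fun = len(inspect.signature(fun).parameters)
    n_hess = len(inspect.signature(hess).parameters)
    if n_hess == n_fun:
        return 'matrix'
    if n_hess == n_fun + 1:
        return 'product'
    raise ValueError('The number of arguments of the Hessian function '
                     'does not agree with that of the objective function.')

def f(x):
    return x @ x

print(classify_hessian(f, lambda x: 2 * np.eye(len(x))))  # -> 'matrix'
print(classify_hessian(f, lambda x, p: 2 * p))            # -> 'product'
```

One practical weakness of this kind of introspection is that C-implemented callables and some wrappers (for example `functools.partial` objects) expose no argument list at all, which may be part of why released SciPy versions ultimately kept `hess` and `hessp` as separate keywords.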
scipy/optimize/tests/test_optimize.py (128 changed lines)
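
The body of the test diff is not reproduced above, but a test exercising the combined argument would plausibly take the shape sketched below. This is not the actual content of `test_optimize.py`; it assumes the PR-era `minimize`, which returns the solution array when `full_output` is not requested, and uses SciPy's real Rosenbrock helpers.

```python
import numpy as np
from numpy.testing import assert_allclose, assert_raises
from scipy.optimize import minimize, rosen, rosen_der, rosen_hess, rosen_hess_prod

def test_newton_cg_combined_hess():
    x0 = np.array([1.3, 0.7, 0.8, 1.9, 1.2])

    # Both Hessian forms should converge to the Rosenbrock minimum at (1, ..., 1).
    for h in (rosen_hess, rosen_hess_prod):
        x = minimize(rosen, x0, method='Newton-CG', jac=rosen_der, hess=h,
                     options={'xtol': 1e-8})
        assert_allclose(x, np.ones_like(x0), atol=1e-6)

    # A Hessian callable whose arity matches neither form should be rejected.
    assert_raises(ValueError, minimize, rosen, x0, method='Newton-CG',
                  jac=rosen_der, hess=lambda x, p, q: None)
```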