Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

ENH: optimize/minimize: combine hess and hessp parameters

  • Loading branch information...
commit c5f12acf3910068c94d8caa5360e041bbcb47912 1 parent efff960
Denis Laxalde authored
View
20 scipy/optimize/minimize.py
@@ -19,7 +19,7 @@
def minimize(fun, x0, args=(), method='Nelder-Mead', jac=None, hess=None,
- hessp=None, options=dict(), full_output=False, callback=None,
+ options=dict(), full_output=False, callback=None,
retall=False):
"""
Minimization of scalar function of one or more variables.
@@ -39,15 +39,13 @@ def minimize(fun, x0, args=(), method='Nelder-Mead', jac=None, hess=None,
jac : callable, optional
Jacobian of objective function (if None, Jacobian will be
estimated numerically). Only for CG, BFGS, Newton-CG.
- hess, hessp : callable, optional
- Hessian of objective function or Hessian of objective function
- times an arbitrary vector p. Only for Newton-CG.
- Only one of `hessp` or `hess` needs to be given. If `hess` is
- provided, then `hessp` will be ignored. If neither `hess` nor
- `hessp` is provided, then the hessian product will be approximated
- using finite differences on `jac`. `hessp` must compute the hessian
- times an arbitrary vector. If it is not given, finite-differences
- on `jac` are used to compute it.
+ hess : callable, optional
+ Hessian of objective function. Only for Newton-CG.
+ The function `hess` may return either the Hessian matrix of `fun`,
+ or the product of the Hessian matrix with an arbitrary vector; in the
+ latter case it accepts an extra argument `p`, as `hess(x, p, *args)`.
+ If `hess` is None, the Hessian will be approximated using finite
+ differences on `jac`.
options : dict, optional
A dictionary of solver options. All methods accept the following
generic options:
@@ -203,7 +201,7 @@ def minimize(fun, x0, args=(), method='Nelder-Mead', jac=None, hess=None,
return _minimize_bfgs(fun, x0, args, jac, options, full_output,
retall, callback)
elif method.lower() == 'newton-cg':
- return _minimize_newtoncg(fun, x0, args, jac, hess, hessp, options,
+ return _minimize_newtoncg(fun, x0, args, jac, hess, options,
full_output, retall, callback)
elif method.lower() == 'anneal':
if callback:
View
33 scipy/optimize/optimize.py
@@ -28,6 +28,7 @@
from linesearch import \
line_search_BFGS, line_search_wolfe1, line_search_wolfe2, \
line_search_wolfe2 as line_search
+import inspect
# standard status messages of optimizers
@@ -1148,13 +1149,20 @@ def fmin_ncg(f, x0, fprime, fhess_p=None, fhess=None, args=(), avextol=1e-5,
'maxiter': maxiter,
'disp': disp}
+ if fhess is not None:
+ hess = fhess
+ elif fhess_p is not None:
+ hess = fhess_p
+ else:
+ hess = None
+
# force full_output if retall=True to preserve backwards compatibility
if retall and not full_output:
- out = _minimize_newtoncg(f, x0, args, fprime, fhess, fhess_p, opts,
+ out = _minimize_newtoncg(f, x0, args, fprime, hess, opts,
full_output=True, retall=retall,
callback=callback)
else:
- out = _minimize_newtoncg(f, x0, args, fprime, fhess, fhess_p, opts,
+ out = _minimize_newtoncg(f, x0, args, fprime, hess, opts,
full_output, retall, callback)
if full_output:
@@ -1171,7 +1179,7 @@ def fmin_ncg(f, x0, fprime, fhess_p=None, fhess=None, args=(), avextol=1e-5,
else:
return out
-def _minimize_newtoncg(fun, x0, args=(), jac=None, hess=None, hessp=None,
+def _minimize_newtoncg(fun, x0, args=(), jac=None, hess=None,
options={}, full_output=0, retall=0, callback=None):
"""
Minimization of scalar function of one or more variables using the
@@ -1197,8 +1205,23 @@ def _minimize_newtoncg(fun, x0, args=(), jac=None, hess=None, hessp=None,
raise ValueError('Jacobian is required for Newton-CG method')
f = fun
fprime = jac
- fhess_p = hessp
- fhess = hess
+ if hess is None:
+ fhess = None
+ fhess_p = None
+ else:
+ # check hessian type based on the number of arguments
+ fun_args = inspect.getargspec(fun)[0]
+ hess_args = inspect.getargspec(hess)[0]
+ if len(hess_args) == len(fun_args):
+ fhess = hess
+ fhess_p = None
+ elif len(hess_args) == len(fun_args) + 1:
+ fhess = None
+ fhess_p = hess
+ else:
+ raise ValueError('The number of arguments of the Hessian '
+ 'function does not agree with that of the '
+ 'objective function.')
# retrieve useful options
avextol = options.get('xtol', 1e-5)
epsilon = options.get('eps', _epsilon)
View
2  scipy/optimize/tests/test_optimize.py
@@ -331,7 +331,7 @@ def test_ncg_hessp(self, use_wrapper=False):
opts = {'maxit': self.maxiter, 'disp': False}
retval = optimize.minimize(self.func, self.startparams,
method='Newton-CG', jac=self.grad,
- hessp = self.hessp,
+ hess = self.hessp,
args=(), options=opts,
full_output=False, retall=False)
else:
Please sign in to comment.
Something went wrong with that request. Please try again.