# scipy/scipy

FIX: don't use {} as default value for options parameter in optimize

Commit `1f921567952e1989787fe8b183eeae504184cb09` (1 parent: `4208897`), committed by dlax on Apr 19, 2012.
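The motivation for this fix is a classic Python pitfall: a default argument value is evaluated once, when the `def` statement runs, so a `{}` or `dict()` default yields a single dict object shared by every call that omits the argument. Any mutation of that dict silently persists into later calls. The `options=None` sentinel plus an `if options is None: options = {}` guard gives each call a fresh dict instead. A minimal sketch of the pitfall and the fix (the `get_opts_*` functions below are illustrative, not scipy code):

```python
def get_opts_buggy(options={}):
    # The default dict is created once, at definition time, and is
    # shared by every call that omits `options`.
    options.setdefault('maxiter', 100)
    return options

a = get_opts_buggy()
a['maxiter'] = 5            # mutates the shared default dict
b = get_opts_buggy()
print(b['maxiter'])         # 5, not 100: state leaked across calls

def get_opts_fixed(options=None):
    # `None` sentinel: each call that omits `options` gets a fresh dict.
    if options is None:
        options = {}
    options.setdefault('maxiter', 100)
    return options

c = get_opts_fixed()
c['maxiter'] = 5            # only affects this call's dict
print(get_opts_fixed()['maxiter'])   # 100 every time
```

The diffs below apply exactly this sentinel pattern to the public `minimize`/`minimize_scalar` entry points and to each method-specific `_minimize_*` backend.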
## scipy/optimize/_minimize.py (8 changed lines)

```diff
@@ -29,7 +29,7 @@ def minimize(fun, x0, args=(), method='BFGS', jac=None, hess=None,
              hessp=None, bounds=None, constraints=(),
-             options=dict(), callback=None):
+             options=None, callback=None):
     """
     Minimization of scalar function of one or more variables.
@@ -292,6 +292,8 @@ def minimize(fun, x0, args=(), method='BFGS', jac=None, hess=None,
     It should converge to the theoretical solution (1.4 ,1.7).
     """
     meth = method.lower()
+    if options is None:
+        options = {}
     # check if optional parameters are supported by the selected method
     # - jac
     if meth in ['nelder-mead', 'powell', 'anneal', 'cobyla'] and bool(jac):
@@ -358,7 +360,7 @@ def minimize(fun, x0, args=(), method='BFGS', jac=None, hess=None,
 
 def minimize_scalar(fun, bracket=None, bounds=None, args=(),
-                    method='brent', options=dict()):
+                    method='brent', options=None):
     """
     Minimization of scalar function of one variable.
@@ -462,6 +464,8 @@ def minimize_scalar(fun, bracket=None, bounds=None, args=(),
     -2.0000002026
     """
     meth = method.lower()
+    if options is None:
+        options = {}
     if meth == 'brent':
         return _minimize_scalar_brent(fun, bracket, args, options)
```
## scipy/optimize/anneal.py (4 changed lines)

```diff
@@ -311,7 +311,7 @@ def anneal(func, x0, args=(), schedule='fast', full_output=0,
     else:
         return x, info['status']
 
-def _minimize_anneal(func, x0, args=(), options={}):
+def _minimize_anneal(func, x0, args=(), options=None):
    """
    Minimization of scalar function of one or more variables using the
    simulated annealing algorithm.
@@ -350,6 +350,8 @@ def _minimize_anneal(func, x0, args=(), options={}):
    This function is called by the `minimize` function with
    `method=anneal`. It is not supposed to be called directly.
    """
+    if options is None:
+        options = {}
     # retrieve useful options
     schedule = options.get('schedule', 'fast')
     T0 = options.get('T0')
```
## scipy/optimize/cobyla.py (4 changed lines)

```diff
@@ -162,7 +162,7 @@ def fmin_cobyla(func, x0, cons, args=(), consargs=None, rhobeg=1.0, rhoend=1e-4,
     return _minimize_cobyla(func, x0, args, constraints=con,
                             options=opts)[0]
 
-def _minimize_cobyla(fun, x0, args=(), constraints=(), options={}):
+def _minimize_cobyla(fun, x0, args=(), constraints=(), options=None):
    """
    Minimize a scalar function of one or more variables using the
    Constrained Optimization BY Linear Approximation (COBYLA) algorithm.
@@ -182,6 +182,8 @@ def _minimize_cobyla(fun, x0, args=(), constraints=(), options={}):
    This function is called by the `minimize` function with
    `method=COBYLA`. It is not supposed to be called directly.
    """
+    if options is None:
+        options = {}
     # retrieve useful options
     rhobeg = options.get('rhobeg', 1.0)
     rhoend = options.get('rhoend', 1e-4)
```
## scipy/optimize/lbfgsb.py (4 changed lines)

```diff
@@ -170,7 +170,7 @@ def fmin_l_bfgs_b(func, x0, fprime=None, args=(),
     return x, f, d
 
-def _minimize_lbfgsb(fun, x0, args=(), jac=None, bounds=None, options={}):
+def _minimize_lbfgsb(fun, x0, args=(), jac=None, bounds=None, options=None):
    """
    Minimize a scalar function of one or more variables using the L-BFGS-B
    algorithm.
@@ -204,6 +204,8 @@ def _minimize_lbfgsb(fun, x0, args=(), jac=None, bounds=None, options={}):
    This function is called by the `minimize` function with
    `method=L-BFGS-B`. It is not supposed to be called directly.
    """
+    if options is None:
+        options = {}
     # retrieve useful options
     disp = options.get('disp', None)
     m = options.get('maxcor', 10)
```
## scipy/optimize/optimize.py (32 changed lines)

```diff
@@ -308,7 +308,7 @@ def fmin(func, x0, args=(), xtol=1e-4, ftol=1e-4, maxiter=None, maxfun=None,
     else:
         return x
 
-def _minimize_neldermead(func, x0, args=(), options={}, callback=None):
+def _minimize_neldermead(func, x0, args=(), options=None, callback=None):
    """
    Minimization of scalar function of one or more variables using the
    Nelder-Mead algorithm.
@@ -328,6 +328,8 @@ def _minimize_neldermead(func, x0, args=(), options={}, callback=None):
    This function is called by the `minimize` function with
    `method=Nelder-Mead`. It is not supposed to be called directly.
    """
+    if options is None:
+        options = {}
     # retrieve useful options
     xtol = options.get('xtol', 1e-4)
     ftol = options.get('ftol', 1e-4)
@@ -675,7 +677,7 @@ def fmin_bfgs(f, x0, fprime=None, args=(), gtol=1e-5, norm=Inf,
     else:
         return x
 
-def _minimize_bfgs(fun, x0, args=(), jac=None, options={}, callback=None):
+def _minimize_bfgs(fun, x0, args=(), jac=None, options=None, callback=None):
    """
    Minimization of scalar function of one or more variables using the
    BFGS algorithm.
@@ -698,6 +700,8 @@ def _minimize_bfgs(fun, x0, args=(), jac=None, options={}, callback=None):
    """
    f = fun
    fprime = jac
+    if options is None:
+        options = {}
     # retrieve useful options
     gtol = options.get('gtol', 1e-5)
     norm = options.get('norm', Inf)
@@ -913,7 +917,7 @@ def fmin_cg(f, x0, fprime=None, args=(), gtol=1e-5, norm=Inf, epsilon=_epsilon,
     else:
         return x
 
-def _minimize_cg(fun, x0, args=(), jac=None, options={}, callback=None):
+def _minimize_cg(fun, x0, args=(), jac=None, options=None, callback=None):
    """
    Minimization of scalar function of one or more variables using the
    conjugate gradient algorithm.
@@ -936,6 +940,8 @@ def _minimize_cg(fun, x0, args=(), jac=None, options={}, callback=None):
    """
    f = fun
    fprime = jac
+    if options is None:
+        options = {}
     # retrieve useful options
     gtol = options.get('gtol', 1e-5)
     norm = options.get('norm', Inf)
@@ -1154,7 +1160,7 @@ def fmin_ncg(f, x0, fprime, fhess_p=None, fhess=None, args=(), avextol=1e-5,
     return x
 
 def _minimize_newtoncg(fun, x0, args=(), jac=None, hess=None, hessp=None,
-                       options={}, callback=None):
+                       options=None, callback=None):
    """
    Minimization of scalar function of one or more variables using the
    Newton-CG algorithm.
@@ -1181,6 +1187,8 @@ def _minimize_newtoncg(fun, x0, args=(), jac=None, hess=None, hessp=None,
    fprime = jac
    fhess_p = hessp
    fhess = hess
+    if options is None:
+        options = {}
     # retrieve useful options
     avextol = options.get('xtol', 1e-5)
     epsilon = options.get('eps', _epsilon)
@@ -1357,7 +1365,9 @@ def fminbound(func, x1, x2, args=(), xtol=1e-5, maxfun=500,
     else:
         return x
 
-def _minimize_scalar_bounded(func, bounds, args=(), options={}):
+def _minimize_scalar_bounded(func, bounds, args=(), options=None):
+    if options is None:
+        options = {}
     # retrieve options
     xtol = options.get('xtol', 1e-5)
     maxfun = options.get('maxfev', 500)
@@ -1676,7 +1686,9 @@ def brent(func, args=(), brack=None, tol=1.48e-8, full_output=0, maxiter=500):
     else:
         return x
 
-def _minimize_scalar_brent(func, brack=None, args=(), options={}):
+def _minimize_scalar_brent(func, brack=None, args=(), options=None):
+    if options is None:
+        options = {}
     # retrieve options
     tol = options.get('ftol', 1.48e-8)
     maxiter = options.get('maxiter', 500)
@@ -1732,7 +1744,9 @@ def golden(func, args=(), brack=None, tol=_epsilon, full_output=0):
     else:
         return x
 
-def _minimize_scalar_golden(func, brack=None, args=(), options={}):
+def _minimize_scalar_golden(func, brack=None, args=(), options=None):
+    if options is None:
+        options = {}
     tol = options.get('ftol', _epsilon)
     if brack is None:
         xa, xb, xc, fa, fb, fc, funcalls = bracket(func, args=args)
@@ -2005,7 +2019,7 @@ def fmin_powell(func, x0, args=(), xtol=1e-4, ftol=1e-4, maxiter=None,
     else:
         return x
 
-def _minimize_powell(func, x0, args=(), options={}, callback=None):
+def _minimize_powell(func, x0, args=(), options=None, callback=None):
    """
    Minimization of scalar function of one or more variables using the
    modified Powell algorithm.
@@ -2027,6 +2041,8 @@ def _minimize_powell(func, x0, args=(), options={}, callback=None):
    This function is called by the `minimize` function with
    `method=Powell`. It is not supposed to be called directly.
    """
+    if options is None:
+        options = {}
     # retrieve useful options
     xtol = options.get('xtol', 1e-4)
     ftol = options.get('ftol', 1e-4)
```
## scipy/optimize/slsqp.py (4 changed lines)

```diff
@@ -192,7 +192,7 @@ def fmin_slsqp( func, x0 , eqcons=[], f_eqcons=None, ieqcons=[], f_ieqcons=None,
     return x
 
 def _minimize_slsqp(func, x0, args=(), jac=None, bounds=None,
-                    constraints=(), options={}):
+                    constraints=(), options=None):
    """
    Minimize a scalar function of one or more variables using Sequential
    Least SQuares Programming (SLSQP).
@@ -210,6 +210,8 @@ def _minimize_slsqp(func, x0, args=(), jac=None, bounds=None,
    `method=SLSQP`. It is not supposed to be called directly.
    """
    fprime = jac
+    if options is None:
+        options = {}
     # retrieve useful options
     iter = options.get('maxiter', 100)
     acc = options.get('ftol', 1.0E-6)
```
## scipy/optimize/tnc.py (4 changed lines)

```diff
@@ -258,7 +258,7 @@ def fmin_tnc(func, x0, fprime=None, args=(), approx_grad=0,
     return x, info['nfev'], info['status']
 
-def _minimize_tnc(fun, x0, args=(), jac=None, bounds=None, options={}):
+def _minimize_tnc(fun, x0, args=(), jac=None, bounds=None, options=None):
    """
    Minimize a scalar function of one or more variables using a truncated
    Newton (TNC) algorithm.
@@ -317,6 +317,8 @@ def _minimize_tnc(fun, x0, args=(), jac=None, bounds=None, options={}):
    This function is called by the `minimize` function with `method=TNC`.
    It is not supposed to be called directly.
    """
+    if options is None:
+        options = {}
     # retrieve useful options
     epsilon = options.get('eps', 1e-8)
     scale = options.get('scale')
```
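For callers the change is behavior-preserving: `options` may still be omitted or passed explicitly, and defaults are now resolved from a per-call dict. A hedged usage sketch against `minimize` as defined in this commit; the `xtol`/`ftol` option names come from the `_minimize_neldermead` hunk above, and later scipy releases renamed some Nelder-Mead options, so treat the exact keys as version-dependent:

```python
import numpy as np
from scipy.optimize import minimize

def rosen(x):
    # Rosenbrock function, a standard test problem for minimizers
    return (1.0 - x[0])**2 + 100.0 * (x[1] - x[0]**2)**2

x0 = np.array([2.0, 2.0])

# Omitting `options` now binds a fresh {} inside the call,
# so repeated calls cannot contaminate each other.
res_default = minimize(rosen, x0, method='Nelder-Mead')

# Passing options explicitly works exactly as before.
res_tight = minimize(rosen, x0, method='Nelder-Mead',
                     options={'xtol': 1e-8, 'ftol': 1e-8})
```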