Commit 5d693b5: sound maxfun for l-bfgs-b

François Laurent committed Nov 12, 2019 (1 parent: 3b7c617)
Showing 7 changed files with 81 additions and 37 deletions.
doc/conf.py (2 changes: 1 addition & 1 deletion)

```diff
@@ -83,7 +83,7 @@ def __getattr__(cls, name):
 # built documents.
 #
 # The short X.Y version.
-version = u'0.4.5'
+version = u'0.4.6'
 # The full version, including alpha/beta/rc tags.
 release = u'0.4'
```
setup.py (2 changes: 1 addition & 1 deletion)

```diff
@@ -19,7 +19,7 @@
 
 setup(
     name = 'tramway',
-    version = '0.4.5',
+    version = '0.4.6',
     description = 'TRamWAy',
     long_description = long_description,
     url = 'https://github.com/DecBayComp/TRamWAy',
```
tests/test_commandline.py (4 changes: 2 additions & 2 deletions)

```diff
@@ -17,9 +17,9 @@
 import random
 
 
-py2_hash, py3_hash = 'ycyrC4rP', 'pIdDAnzt'
+py2_hash, py3_hash = 'MGtbXz14', 'mLtwdRkb'
 data_server = 'http://dl.pasteur.fr/fop/{}/'.format(py2_hash if sys.version_info[0] == 2 else py3_hash)
-data_update = '191031'
+data_update = '191112'
 data_file = 'glycine_receptor.trxyt'
 
 data_dir = '{}_py{}_{}'.format('test_commandline', sys.version_info[0], data_update)
```
tramway/inference/dv.py (36 changes: 20 additions & 16 deletions)

```diff
@@ -322,24 +322,28 @@ def inferDV(cells, diffusivity_prior=None, potential_prior=None, \
     grad_kwargs = get_grad_kwargs(kwargs, epsilon=epsilon, compatibility=compatibility)
 
     # parametrize the optimization algorithm
-    default_BFGS_options = dict(maxiter=1e3, disp=verbose)
-    if min_diffusivity not in (False, None):
-        default_BFGS_options.update(dict(maxcor=dv.combined.size, ftol=1e-8))
-    #default_BFGS_options = dict(maxiter=1e3, disp=verbose)
-    options = kwargs.pop('options', default_BFGS_options)
-    if max_iter:
-        options['maxiter'] = max_iter
-    V_bounds = [(None, None)] * V_initial.size
-    if min_diffusivity not in (False, None):
+    default_lBFGSb_options = dict(maxiter=1e3, maxfun=1e10, maxcor=dv.combined.size, ftol=1e-8)
+    # in L-BFGS-B the number of iterations is usually very low (~10-100) while the number of
+    # function evaluations is much higher (~1e4-1e5);
+    # with maxfun defined, an iteration can stop anytime and the optimization may terminate
+    # with an error message
+    if min_diffusivity is None:
         bounds = None
+        options = {}
     else:
+        V_bounds = [(None, None)] * V_initial.size
         bounds = D_bounds + V_bounds
-        options['maxfun'] = 1e10
-        # in L-BFGS-B the number of iterations is usually very low (~10-100) while the number of
-        # function evaluations is much higher (~1e4-1e5);
-        # with maxfun defined, an iteration can stop anytime and the optimization may terminate
-        # with an error message
-    options.update(kwargs)
+        options = dict(default_lBFGSb_options)
+    options.update(kwargs.pop('options', {}))
+    options.update(**kwargs) # for backward compatibility
+    if max_iter:
+        options['maxiter'] = max_iter
+    if verbose:
+        options['disp'] = verbose
+    if options:
+        _kwargs = dict(options = options)
+    else:
+        _kwargs = {}
 
     # posterior function
     if rgrad in ('delta','delta0','delta1'):
@@ -361,7 +365,7 @@ def inferDV(cells, diffusivity_prior=None, potential_prior=None, \
         args = args + (y0, 1 < int(verbose), posteriors)
 
     # run the optimization routine
-    result = minimize(fun, dv.combined, args=args, bounds=bounds, options=options)
+    result = minimize(fun, dv.combined, args=args, bounds=bounds, **_kwargs)
     if not (result.success or verbose):
         warn('{}'.format(result.message), OptimizationWarning)
```
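The inline comment above is the point of the commit: SciPy's L-BFGS-B caps function evaluations (`maxfun`, 15000 by default) independently of iterations (`maxiter`), and a single iteration can consume many evaluations through its line search and finite-difference gradients, so the default cap can abort a run long before `maxiter` is reached. Setting `maxfun=1e10` effectively disables that cap. The sketch below is illustrative only, not TRamWAy code; passing `bounds` is what makes `minimize` pick L-BFGS-B, and comparing `result.nit` with `result.nfev` shows the two counters growing at very different rates.

```python
import numpy as np
from scipy.optimize import minimize

def rosenbrock(x):
    # classic test function; no analytic gradient is supplied, so L-BFGS-B
    # approximates it by finite differences, spending roughly n+1 evaluations
    # per gradient and making nfev grow much faster than nit
    return np.sum(100.0 * (x[1:] - x[:-1]**2)**2 + (1.0 - x[:-1])**2)

x0 = np.zeros(10)
bounds = [(-5.0, 5.0)] * x0.size  # supplying bounds selects the L-BFGS-B method
options = dict(maxiter=1000, maxfun=int(1e10), ftol=1e-8)

result = minimize(rosenbrock, x0, bounds=bounds, options=options)
print(result.nit, result.nfev, result.message)  # iterations << evaluations
```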
tramway/inference/standard_d.py (31 changes: 22 additions & 9 deletions)

```diff
@@ -139,7 +139,7 @@ def d_neg_posterior1(diffusivity, cells, sigma2, diffusivity_prior, \
 
 
 def infer_smooth_D(cells, diffusivity_prior=None, jeffreys_prior=None, \
-        min_diffusivity=None, max_iter=None, epsilon=None, rgrad=None, **kwargs):
+        min_diffusivity=None, max_iter=None, epsilon=None, rgrad=None, verbose=False, **kwargs):
 
     # initial values
     localization_error = cells.get_localization_error(kwargs, 0.03, True)
@@ -150,12 +150,23 @@
 
     # gradient options
     grad_kwargs = get_grad_kwargs(kwargs, epsilon=epsilon)
 
-    # parametrize the optimization procedure
-    if min_diffusivity is not None:
+    # parametrize the optimization algorithm
+    default_lBFGSb_options = dict(maxiter=1e3, maxfun=1e10, ftol=1e-6)
+    # in L-BFGS-B the number of iterations is usually very low (~10-100) while the number of
+    # function evaluations is much higher (~1e4-1e5);
+    # with maxfun defined, an iteration can stop anytime and the optimization may terminate
+    # with an error message
+    if min_diffusivity is None:
+        options = {}
+    else:
         kwargs['bounds'] = D_bounds
+        options = dict(default_lBFGSb_options)
+    options.update(kwargs.pop('options', {}))
     if max_iter:
-        options = kwargs.get('options', {})
         options['maxiter'] = max_iter
+    if verbose:
+        options['disp'] = verbose
+    if options:
         kwargs['options'] = options
@@ -166,14 +177,16 @@
         warn('unsupported rgrad: {}'.format(rgrad), RuntimeWarning)
         fun = smooth_d_neg_posterior
 
+    args = (cells, localization_error, diffusivity_prior, jeffreys_prior, dt_mean, min_diffusivity, index, reverse_index, grad_kwargs)
+
     # run the optimization
-    result = minimize(fun, D_initial, \
-        args=(cells, localization_error, diffusivity_prior, jeffreys_prior, dt_mean, min_diffusivity, index, reverse_index, grad_kwargs), \
-        **kwargs)
+    result = minimize(fun, D_initial, args=args, **kwargs)
+    if not (result.success or verbose):
+        warn('{}'.format(result.message), OptimizationWarning)
 
     # format the result
     D = result.x
-    DD = pd.DataFrame(D, index=index, columns=['diffusivity'])
+    D = pd.DataFrame(D, index=index, columns=['diffusivity'])
 
-    return DD
+    return D
```
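The same merge order for solver options is repeated in every module this commit touches: library defaults first, then any caller-supplied `options` dict, then the explicit `max_iter` and `verbose` arguments. A hypothetical standalone helper (not a TRamWAy function) restates that precedence:

```python
def build_lbfgsb_options(max_iter=None, verbose=False, **kwargs):
    # precedence: defaults < caller's options dict < explicit arguments
    defaults = dict(maxiter=1e3, maxfun=1e10, ftol=1e-6)
    options = dict(defaults)                   # copy so the defaults stay intact
    options.update(kwargs.pop('options', {}))  # caller's options override them
    if max_iter:
        options['maxiter'] = max_iter          # explicit arguments win last
    if verbose:
        options['disp'] = verbose
    return options

print(build_lbfgsb_options(max_iter=500, options=dict(ftol=1e-9)))
# {'maxiter': 500, 'maxfun': 10000000000.0, 'ftol': 1e-09}
```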

tramway/inference/standard_ddrift.py (21 changes: 17 additions & 4 deletions)

```diff
@@ -109,26 +109,37 @@ def dd_neg_posterior1(x, dd, cells, sigma2, diffusivity_prior, drift_prior,
 
 
 def infer_smooth_DD(cells, diffusivity_prior=None, drift_prior=None, jeffreys_prior=False,
-        min_diffusivity=None, max_iter=None, epsilon=None, rgrad=None, **kwargs):
+        min_diffusivity=None, max_iter=None, epsilon=None, rgrad=None, verbose=False, **kwargs):
 
     # initial values
     localization_error = cells.get_localization_error(kwargs, 0.03, True)
     index, reverse_index, n, dt_mean, D_initial, min_diffusivity, D_bounds, _ = \
         smooth_infer_init(cells, min_diffusivity=min_diffusivity, jeffreys_prior=jeffreys_prior,
             sigma2=localization_error)
     initial_drift = np.zeros((len(index), cells.dim), dtype=D_initial.dtype)
-    drift_bounds = [(None, None)] * initial_drift.size # no bounds
     dd = ChainArray('D', D_initial, 'drift', initial_drift)
 
     # gradient options
     grad_kwargs = get_grad_kwargs(epsilon=epsilon, **kwargs)
 
     # parametrize the optimization algorithm
-    if min_diffusivity is not None:
+    default_lBFGSb_options = dict(maxiter=1e3, maxfun=1e10, ftol=1e-6)
+    # in L-BFGS-B the number of iterations is usually very low (~10-100) while the number of
+    # function evaluations is much higher (~1e4-1e5);
+    # with maxfun defined, an iteration can stop anytime and the optimization may terminate
+    # with an error message
+    if min_diffusivity is None:
+        options = {}
+    else:
+        drift_bounds = [(None, None)] * initial_drift.size # no bounds
         kwargs['bounds'] = D_bounds + drift_bounds
+        options = dict(default_lBFGSb_options)
+    options.update(kwargs.pop('options', {}))
     if max_iter:
-        options = kwargs.get('options', {})
         options['maxiter'] = max_iter
+    if verbose:
+        options['disp'] = verbose
+    if options:
         kwargs['options'] = options
@@ -144,6 +155,8 @@
     args = (dd, cells, localization_error, diffusivity_prior, drift_prior, jeffreys_prior, \
         dt_mean, min_diffusivity, index, reverse_index, grad_kwargs)
     result = minimize(fun, dd.combined, args=args, **kwargs)
+    if not (result.success or verbose):
+        warn('{}'.format(result.message), OptimizationWarning)
 
     # collect the result
     dd.update(result.x)
```
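`dd.combined` and `dd.update(result.x)` suggest that `ChainArray` packs several named arrays into the single flat vector that `scipy.optimize.minimize` expects, then slices the optimizer's result back out by name. The stand-in below only guesses at that behaviour for illustration; the real class ships with TRamWAy and may differ:

```python
import numpy as np

class ChainArraySketch:
    """Pack named arrays into one flat vector; hypothetical stand-in."""
    def __init__(self, *name_array_pairs):
        self.names = list(name_array_pairs[0::2])
        arrays = [np.asarray(a) for a in name_array_pairs[1::2]]
        self.shapes = [a.shape for a in arrays]
        self.combined = np.concatenate([a.ravel() for a in arrays])
    def update(self, combined):
        self.combined = combined               # e.g. dd.update(result.x)
    def __getitem__(self, name):
        i = self.names.index(name)
        start = sum(int(np.prod(s)) for s in self.shapes[:i])
        size = int(np.prod(self.shapes[i]))
        return self.combined[start:start + size].reshape(self.shapes[i])

dd = ChainArraySketch('D', np.ones(4), 'drift', np.zeros((4, 2)))
print(dd.combined.shape)  # (12,): one flat vector for the optimizer
print(dd['drift'].shape)  # (4, 2): recovered by name after the fit
```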
tramway/inference/standard_df.py (22 changes: 18 additions & 4 deletions)

```diff
@@ -114,7 +114,8 @@ def df_neg_posterior1(x, df, cells, sigma2, diffusivity_prior,
 
 
 def infer_smooth_DF(cells, diffusivity_prior=None, force_prior=None, potential_prior=None,
-        jeffreys_prior=False, min_diffusivity=None, max_iter=None, epsilon=None, rgrad=None, **kwargs):
+        jeffreys_prior=False, min_diffusivity=None, max_iter=None, epsilon=None, rgrad=None,
+        verbose=False, **kwargs):
     """
     Argument `potential_prior` is an alias for `force_prior` which penalizes the large force amplitudes.
     """
@@ -125,18 +126,29 @@ def infer_smooth_DF(cells, diffusivity_prior=None, force_prior=None, potential_prior=None,
         smooth_infer_init(cells, min_diffusivity=min_diffusivity, jeffreys_prior=jeffreys_prior,
             sigma2=localization_error)
     F_initial = np.zeros((len(index), cells.dim), dtype=D_initial.dtype)
-    F_bounds = [(None, None)] * F_initial.size # no bounds
     df = ChainArray('D', D_initial, 'F', F_initial)
 
     # gradient options
     grad_kwargs = get_grad_kwargs(epsilon=epsilon, **kwargs)
 
     # parametrize the optimization algorithm
-    if min_diffusivity is not None:
+    default_lBFGSb_options = dict(maxiter=1e3, maxfun=1e10, ftol=1e-6)
+    # in L-BFGS-B the number of iterations is usually very low (~10-100) while the number of
+    # function evaluations is much higher (~1e4-1e5);
+    # with maxfun defined, an iteration can stop anytime and the optimization may terminate
+    # with an error message
+    if min_diffusivity is None:
+        options = {}
+    else:
+        F_bounds = [(None, None)] * F_initial.size # no bounds
         kwargs['bounds'] = D_bounds + F_bounds
+        options = dict(default_lBFGSb_options)
+    options.update(kwargs.pop('options', {}))
     if max_iter:
-        options = kwargs.get('options', {})
         options['maxiter'] = max_iter
+    if verbose:
+        options['disp'] = verbose
+    if options:
         kwargs['options'] = options
@@ -157,6 +169,8 @@
     #cell.cache = None # no cache needed
     args = (df, cells, localization_error, diffusivity_prior, force_prior, jeffreys_prior, dt_mean, min_diffusivity, index, reverse_index, grad_kwargs)
     result = minimize(fun, df.combined, args=args, **kwargs)
+    if not (result.success or verbose):
+        warn('{}'.format(result.message), OptimizationWarning)
 
     # collect the result
     df.update(result.x)
```
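The guard added after each `minimize` call warns only when the optimizer reports failure and `verbose` is off; with `disp` enabled, L-BFGS-B already prints its own termination message, so a second notice would be redundant. In isolation, with a stand-in for TRamWAy's `OptimizationWarning`, the pattern reads:

```python
from warnings import warn

class OptimizationWarning(RuntimeWarning):
    """Stand-in for the warning class used by tramway.inference."""

def check(result, verbose=False):
    # warn on failure unless the solver was already verbose about it
    if not (result.success or verbose):
        warn('{}'.format(result.message), OptimizationWarning)
```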
