Skip to content
Permalink
Browse files

Deprecated nuts_kwargs and step_kwargs

  • Loading branch information
fonnesbeck committed Dec 27, 2018
1 parent e526c1e commit 88acc7c44e61a22fa3861515c4128f37d80904f3
"source": [
"with Centered_eight:\n",
" fit_cp85 = pm.sample(5000, chains=2, tune=2000,\n",
" nuts_kwargs=dict(target_accept=.85))"
" target_accept=.85)"
]
},
{
"source": [
"with Centered_eight:\n",
" fit_cp90 = pm.sample(5000, chains=2, tune=2000,\n",
" nuts_kwargs=dict(target_accept=.90))"
" target_accept=.90)"
]
},
{
"source": [
"with Centered_eight:\n",
" fit_cp95 = pm.sample(5000, chains=2, tune=2000,\n",
" nuts_kwargs=dict(target_accept=.95))"
" target_accept=.95)"
]
},
{
"source": [
"with Centered_eight:\n",
" fit_cp99 = pm.sample(5000, chains=2, tune=2000,\n",
" nuts_kwargs=dict(target_accept=.99))"
" target_accept=.99)"
]
},
{
"source": [
"with NonCentered_eight:\n",
" fit_ncp80 = pm.sample(5000, chains=2, tune=1000, random_seed=SEED,\n",
" nuts_kwargs=dict(target_accept=.80))"
" target_accept=.80)"
]
},
{
"source": [
"with NonCentered_eight:\n",
" fit_ncp90 = pm.sample(5000, chains=2, tune=1000, random_seed=SEED,\n",
" nuts_kwargs=dict(target_accept=.90))\n",
" target_accept=.90)\n",
" \n",
"# display the total number and percentage of divergent\n",
"divergent = fit_ncp90['diverging']\n",
@@ -309,7 +309,7 @@
" theta = pm.Beta('theta', alpha=ab[0], beta=ab[1], shape=N)\n",
"\n",
" p = pm.Binomial('y', p=theta, observed=y, n=n)\n",
" trace = pm.sample(1000, tune=2000, nuts_kwargs={'target_accept': .95})\n",
" trace = pm.sample(1000, tune=2000, target_accept=0.95)\n",
" "
]
},
@@ -328,7 +328,7 @@
"source": [
"with model_randomwalk:\n",
" trace_rw = pm.sample(tune=2000, cores=4, samples=200, \n",
" nuts_kwargs=dict(target_accept=.9))"
" target_accept=0.9)"
]
},
{
@@ -260,7 +260,7 @@
],
"source": [
"with model:\n",
" tr = pm.sample(1000, tune=1000, chains=2, cores=1, nuts_kwargs={\"target_accept\":0.95})"
" tr = pm.sample(1000, tune=1000, chains=2, cores=1, target_accept=0.95)"
]
},
{
@@ -595,7 +595,7 @@
],
"source": [
"with model:\n",
" tr = pm.sample(1000, tune=1000, chains=2, cores=1, nuts_kwargs={\"target_accept\":0.95})"
" tr = pm.sample(1000, tune=1000, chains=2, cores=1, target_accept=0.95)"
]
},
{
],
"source": [
"with model:\n",
" tr = pm.sample(500, chains=2, cores=1, nuts_kwargs={\"target_accept\": 0.95})"
" tr = pm.sample(500, chains=2, cores=1, target_accept=0.95)"
]
},
{
@@ -484,7 +484,7 @@
" # Proportion spatial variance\n",
" alpha = pm.Deterministic('alpha', sd_c/(sd_h+sd_c))\n",
"\n",
" trace1 = pm.sample(3e3, cores=2, tune=1000, nuts_kwargs={'max_treedepth': 15})"
" trace1 = pm.sample(3e3, cores=2, tune=1000, max_treedepth=15)"
]
},
{
@@ -702,7 +702,7 @@
" # Proportion spatial variance\n",
" alpha = pm.Deterministic('alpha', sd_c/(sd_h+sd_c))\n",
"\n",
" trace2 = pm.sample(3e3, cores=2, tune=1000, nuts_kwargs={'max_treedepth': 15})"
" trace2 = pm.sample(3e3, cores=2, tune=1000, max_treedepth=15)"
]
},
{
@@ -171,7 +171,7 @@
"source": [
"with baseball_model:\n",
" trace = pm.sample(2000, tune=1000, chains=2,\n",
" nuts_kwargs={'target_accept': 0.95})"
" target_accept=0.95)"
]
},
{
@@ -171,7 +171,7 @@
],
"source": [
"with model:\n",
" trace = pm.sample(tune=2000, nuts_kwargs=dict(target_accept=.9))"
" trace = pm.sample(tune=2000, target_accept=0.9)"
]
},
{
@@ -184,7 +184,7 @@
"with model_1:\n",
" # Increase tune and change init to avoid divergences\n",
" trace_1 = pm.sample(draws=1000, tune=1000,\n",
" nuts_kwargs={'target_accept': 0.9},\n",
" target_accept=0.9,\n",
" init='adapt_diag')"
]
},
@@ -337,7 +337,7 @@
"with model_2:\n",
" # Increase tune and target_accept to avoid divergences\n",
" trace_2 = pm.sample(draws=1000, tune=1000,\n",
" nuts_kwargs={'target_accept': 0.9})"
" target_accept=0.9)"
]
},
{
@@ -78,7 +78,7 @@ def run(n_samples=1000):
with model:
trace = pm.sample(draws=n_samples,
tune=1000,
nuts_kwargs=dict(target_accept=.99))
target_accept=.99)

pm.plots.traceplot(trace)
pm.plots.forestplot(trace)
@@ -28,7 +28,7 @@ def build_model():
def run(n=2000):
model = build_model()
with model:
trace = pm.sample(n, nuts_kwargs={'target_accept':.99})
trace = pm.sample(n, target_accept=0.99)

pm.traceplot(trace)

@@ -188,7 +188,7 @@ def _cpu_count():


def sample(draws=500, step=None, init='auto', n_init=200000, start=None, trace=None, chain_idx=0,
chains=None, cores=None, tune=500, nuts_kwargs=None, step_kwargs=None, progressbar=True,
chains=None, cores=None, tune=500, progressbar=True,
model=None, random_seed=None, live_plot=False, discard_tuned_samples=True,
live_plot_kwargs=None, compute_convergence_checks=True, **kwargs):
"""Draw samples from the posterior using the given step methods.
@@ -255,22 +255,6 @@ def sample(draws=500, step=None, init='auto', n_init=200000, start=None, trace=N
the step sizes, scalings or similar during tuning. Tuning samples will be drawn in addition
to the number specified in the `draws` argument, and will be discarded unless
`discard_tuned_samples` is set to False.
nuts_kwargs : dict
Options for the NUTS sampler. See the docstring of NUTS for a complete list of options.
Common options are:
* target_accept: float in [0, 1]. The step size is tuned such that we approximate this
acceptance rate. Higher values like 0.9 or 0.95 often work better for problematic
posteriors.
* max_treedepth: The maximum depth of the trajectory tree.
* step_scale: float, default 0.25
The initial guess for the step size scaled down by `1/n**(1/4)`.
If you want to pass options to other step methods, please use `step_kwargs`.
step_kwargs : dict
Options for step methods. Keys are the lower case names of the step method, values are
dicts of keyword arguments. You can find a full list of arguments in the docstring of the
step methods. If you want to pass arguments only to nuts, you can use `nuts_kwargs`.
progressbar : bool
Whether or not to display a progress bar in the command line. The bar shows the percentage
of completion, the sampling speed in samples per second (SPS), and the estimated remaining
@@ -294,6 +278,22 @@ def sample(draws=500, step=None, init='auto', n_init=200000, start=None, trace=N
trace : pymc3.backends.base.MultiTrace
A `MultiTrace` object that contains the samples.
Notes
-----
Optional keyword arguments can be passed to `sample` to be delivered to the
`step_method`s used during sampling. In particular, the NUTS step method accepts
a number of arguments. Common options are:
* target_accept: float in [0, 1]. The step size is tuned such that we approximate this
acceptance rate. Higher values like 0.9 or 0.95 often work better for problematic
posteriors.
* max_treedepth: The maximum depth of the trajectory tree.
* step_scale: float, default 0.25
The initial guess for the step size scaled down by `1/n**(1/4)`.
You can find a full list of arguments in the docstring of the step methods.
Examples
--------
.. code:: ipython
@@ -316,9 +316,20 @@ def sample(draws=500, step=None, init='auto', n_init=200000, start=None, trace=N
"""
model = modelcontext(model)

nuts_kwargs = kwargs.pop('nuts_kwargs', None)
if nuts_kwargs is not None:
warnings.warn("The nuts_kwargs argument has been deprecated. Pass step "
"method arguments directly to sample instead",
DeprecationWarning)
kwargs.update(nuts_kwargs)
step_kwargs = kwargs.pop('step_kwargs', None)
if step_kwargs is not None:
warnings.warn("The step_kwargs argument has been deprecated. Pass step "
"method arguments directly to sample instead",
DeprecationWarning)
kwargs.update(step_kwargs)

if isinstance(step, pm.step_methods.smc.SMC):
if step_kwargs is None:
step_kwargs = {}
trace = smc.sample_smc(draws=draws,
step=step,
progressbar=progressbar,
@@ -372,33 +383,26 @@ def sample(draws=500, step=None, init='auto', n_init=200000, start=None, trace=N

draws += tune

if nuts_kwargs is not None:
if step_kwargs is not None:
raise ValueError("Specify only one of step_kwargs and nuts_kwargs")
step_kwargs = {'nuts': nuts_kwargs}

if model.ndim == 0:
raise ValueError('The model does not contain any free variables.')

if step is None and init is not None and all_continuous(model.vars):
try:
# By default, try to use NUTS
_log.info('Auto-assigning NUTS sampler...')
args = step_kwargs if step_kwargs is not None else {}
args = args.get('nuts', {})
start_, step = init_nuts(init=init, chains=chains, n_init=n_init,
model=model, random_seed=random_seed,
progressbar=progressbar, **args)
progressbar=progressbar, **kwargs)
if start is None:
start = start_
except (AttributeError, NotImplementedError, tg.NullTypeGradError):
# gradient computation failed
_log.info("Initializing NUTS failed. "
"Falling back to elementwise auto-assignment.")
_log.debug('Exception in init nuts', exec_info=True)
step = assign_step_methods(model, step, step_kwargs=step_kwargs)
step = assign_step_methods(model, step, step_kwargs=kwargs)
else:
step = assign_step_methods(model, step, step_kwargs=step_kwargs)
step = assign_step_methods(model, step, step_kwargs=kwargs)

if isinstance(step, list):
step = CompoundStep(step)
@@ -77,11 +77,7 @@ def test_sample_args(self):
pm.sample(50, tune=0, init=None, step_kwargs={'foo': {}})
assert 'foo' in str(excinfo.value)

pm.sample(10, tune=0, init=None, nuts_kwargs={'target_accept': 0.9})

with pytest.raises(ValueError) as excinfo:
pm.sample(5, tune=0, init=None, step_kwargs={}, nuts_kwargs={})
assert 'Specify only one' in str(excinfo.value)
pm.sample(10, tune=0, init=None, target_accept=0.9)

def test_iter_sample(self):
with self.model:

0 comments on commit 88acc7c

Please sign in to comment.
You can’t perform that action at this time.