
Commit

Changed case of init arguments in sample to upper case, but kept backward-compatibility with lower case
fonnesbeck committed Jan 18, 2017
1 parent b6827a6 commit da67ffa
Showing 7 changed files with 30 additions and 25 deletions.
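For context (not part of the diff): after this commit the init option names are documented in upper case, while lower-case spellings keep working because sample() and init_nuts() normalize the argument with init.lower(). A minimal sketch of the resulting call pattern, assuming a trivial one-variable model:

import pymc3 as pm

with pm.Model():
    pm.Normal('x', mu=0, sd=1)
    # documented spelling after this commit
    trace = pm.sample(500, init='ADVI', n_init=1000)
    # lower case is still accepted (normalized internally)
    trace = pm.sample(500, init='advi', n_init=1000)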
2 changes: 1 addition & 1 deletion pymc3/examples/arbitrary_stochastic.py
@@ -20,7 +20,7 @@ def run(n_samples=3000):
start = model.test_point
h = pm.find_hessian(start, model=model)
step = pm.Metropolis(model.vars, h, blocked=True, model=model)
- trace = pm.sample(n_samples, step, start, model=model)
+ trace = pm.sample(n_samples, step=step, start=start, model=model)
return trace

if __name__ == "__main__":
2 changes: 1 addition & 1 deletion pymc3/examples/factor_potential.py
@@ -13,7 +13,7 @@ def run(n=3000):
if n == "short":
n = 50
with model:
- trace = sample(n, step, start)
+ trace = sample(n, step=step, start=start)

if __name__ == '__main__':
run()
31 changes: 18 additions & 13 deletions pymc3/sampling.py
@@ -98,20 +98,20 @@ def sample(draws, step=None, init='advi', n_init=200000, start=None,
A step function or collection of functions. If no step methods are
specified, or are partially specified, they will be assigned
automatically (defaults to None).
- init : str {'advi', 'advi_map', 'map', 'nuts', None}
+ init : str {'ADVI', 'ADVI_MAP', 'MAP', 'NUTS', None}
Initialization method to use.
- * advi : Run ADVI to estimate starting points and diagonal covariance
+ * ADVI : Run ADVI to estimate starting points and diagonal covariance
matrix. If njobs > 1 it will sample starting points from the estimated
posterior, otherwise it will use the estimated posterior mean.
- * advi_map: Initialize ADVI with MAP and use MAP as starting point.
- * map : Use the MAP as starting point.
- * nuts : Run NUTS to estimate starting points and covariance matrix. If
+ * ADVI_MAP: Initialize ADVI with MAP and use MAP as starting point.
+ * MAP : Use the MAP as starting point.
+ * NUTS : Run NUTS to estimate starting points and covariance matrix. If
njobs > 1 it will sample starting points from the estimated posterior,
otherwise it will use the estimated posterior mean.
* None : Do not initialize.
n_init : int
Number of iterations of initializer
- If 'advi', number of iterations, if 'nuts', number of draws.
+ If 'ADVI', number of iterations, if 'nuts', number of draws.
start : dict
Starting point in parameter space (or partial point)
Defaults to trace.point(-1)) if there is a trace provided and
@@ -147,6 +147,8 @@ def sample(draws, step=None, init='advi', n_init=200000, start=None,
"""
model = modelcontext(model)

+ if init is not None:
+     init = init.lower()

if step is None and init is not None and pm.model.all_continuous(model.vars):
# By default, use NUTS sampler
@@ -398,7 +400,7 @@ def sample_ppc(trace, samples=None, model=None, vars=None, size=None, random_see
return {k: np.asarray(v) for k, v in ppc.items()}


- def init_nuts(init='advi', njobs=1, n_init=500000, model=None,
+ def init_nuts(init='ADVI', njobs=1, n_init=500000, model=None,
random_seed=-1, **kwargs):
"""Initialize and sample from posterior of a continuous model.
@@ -409,17 +411,17 @@ def init_nuts(init='advi', njobs=1, n_init=500000, model=None,
Parameters
----------
- init : str {'advi', 'advi_map', 'map', 'nuts'}
+ init : str {'ADVI', 'ADVI_MAP', 'MAP', 'NUTS'}
Initialization method to use.
- * advi : Run ADVI to estimate posterior mean and diagonal covariance matrix.
- * advi_map: Initialize ADVI with MAP and use MAP as starting point.
- * map : Use the MAP as starting point.
- * nuts : Run NUTS and estimate posterior mean and covariance matrix.
+ * ADVI : Run ADVI to estimate posterior mean and diagonal covariance matrix.
+ * ADVI_MAP: Initialize ADVI with MAP and use MAP as starting point.
+ * MAP : Use the MAP as starting point.
+ * NUTS : Run NUTS and estimate posterior mean and covariance matrix.
njobs : int
Number of parallel jobs to start.
n_init : int
Number of iterations of initializer
- If 'advi', number of iterations, if 'metropolis', number of draws.
+ If 'ADVI', number of iterations, if 'metropolis', number of draws.
model : Model (optional if in `with` context)
**kwargs : keyword arguments
Extra keyword arguments are forwarded to pymc3.NUTS.
@@ -439,6 +441,9 @@ def init_nuts(init='advi', njobs=1, n_init=500000, model=None,
pm._log.info('Initializing NUTS using {}...'.format(init))

random_seed = int(np.atleast_1d(random_seed)[0])

+ if init is not None:
+     init = init.lower()

if init == 'advi':
v_params = pm.variational.advi(n=n_init, random_seed=random_seed)
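As an illustration of the options documented above (a sketch, not part of the commit): any of the initializers can be selected by name in either case, since init is lower-cased before dispatch.

import pymc3 as pm

with pm.Model():
    pm.Normal('x', mu=0, sd=1)
    # any documented initializer; case-insensitive after this change
    for method in ('ADVI', 'ADVI_MAP', 'MAP', 'NUTS'):
        trace = pm.sample(100, init=method, n_init=1000)
    # init=None skips initialization; sample() assigns a step method instead
    trace = pm.sample(100, init=None)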
6 changes: 3 additions & 3 deletions pymc3/tests/test_examples.py
@@ -54,7 +54,7 @@ def test_run(self):
h = H(start)

step = pm.HamiltonianMC(model.vars, h)
- pm.sample(50, step, start)
+ pm.sample(50, step=step, start=start)


class TestARM12_6(SeededTest):
@@ -88,7 +88,7 @@ def too_slow(self):
start = pm.find_MAP(start=start,
vars=[model['groupmean'], model['sd_interval_'], model['floor_m']])
step = pm.NUTS(model.vars, scaling=start)
- pm.sample(50, step, start)
+ pm.sample(50, step=step, start=start)


class TestARM12_6Uranium(SeededTest):
@@ -129,7 +129,7 @@ def too_slow(self):
h = np.diag(H(start))

step = pm.HamiltonianMC(model.vars, h)
- pm.sample(50, step, start)
+ pm.sample(50, step=step, start=start)


def build_disaster_model(masked=False):
2 changes: 1 addition & 1 deletion pymc3/tests/test_glm.py
@@ -34,7 +34,7 @@ def test_linear_component(self):
Normal('y_obs', mu=y_est, sd=sigma, observed=self.y_linear)
start = find_MAP(vars=[sigma])
step = Slice(model.vars)
- trace = sample(500, step, start, progressbar=False, random_seed=self.random_seed)
+ trace = sample(500, step=step, start=start, progressbar=False, random_seed=self.random_seed)

self.assertAlmostEqual(np.mean(trace['Intercept']), self.intercept, 1)
self.assertAlmostEqual(np.mean(trace['x']), self.slope, 1)
8 changes: 4 additions & 4 deletions pymc3/tests/test_models_linear.py
@@ -44,7 +44,7 @@ def test_linear_component(self):
Normal('y_obs', mu=lm.y_est, sd=sigma, observed=self.y_linear) # yields y_obs
start = find_MAP(vars=[sigma])
step = Slice(model.vars)
- trace = sample(500, step, start, progressbar=False, random_seed=self.random_seed)
+ trace = sample(500, step=step, start=start, progressbar=False, random_seed=self.random_seed)

self.assertAlmostEqual(np.mean(trace['lm_Intercept']), self.intercept, 1)
self.assertAlmostEqual(np.mean(trace['lm_x0']), self.slope, 1)
@@ -58,7 +58,7 @@ def test_linear_component_from_formula(self):
Normal('y_obs', mu=lm.y_est, sd=sigma, observed=self.y_linear)
start = find_MAP(vars=[sigma])
step = Slice(model.vars)
- trace = sample(500, step, start, progressbar=False, random_seed=self.random_seed)
+ trace = sample(500, step=step, start=start, progressbar=False, random_seed=self.random_seed)

self.assertAlmostEqual(np.mean(trace['Intercept']), self.intercept, 1)
self.assertAlmostEqual(np.mean(trace['x']), self.slope, 1)
@@ -79,7 +79,7 @@ def test_glm(self):
)
start = find_MAP()
step = Slice(model.vars)
- trace = sample(500, step, start, progressbar=False, random_seed=self.random_seed)
+ trace = sample(500, step=step, start=start, progressbar=False, random_seed=self.random_seed)
self.assertAlmostEqual(np.mean(trace['glm_Intercept']), self.intercept, 1)
self.assertAlmostEqual(np.mean(trace['glm_x0']), self.slope, 1)
self.assertAlmostEqual(np.mean(trace['glm_sd']), self.sd, 1)
@@ -91,7 +91,7 @@ def test_glm_from_formula(self):
Glm.from_formula('y ~ x', self.data_linear, name=NAME)
start = find_MAP()
step = Slice(model.vars)
- trace = sample(500, step, start, progressbar=False, random_seed=self.random_seed)
+ trace = sample(500, step=step, start=start, progressbar=False, random_seed=self.random_seed)

self.assertAlmostEqual(np.mean(trace['%s_Intercept' % NAME]), self.intercept, 1)
self.assertAlmostEqual(np.mean(trace['%s_x' % NAME]), self.slope, 1)
4 changes: 2 additions & 2 deletions pymc3/tests/test_plots.py
@@ -20,7 +20,7 @@ def test_plots():
start = model.test_point
h = find_hessian(start)
step = Metropolis(model.vars, h)
- trace = sample(3000, step, start)
+ trace = sample(3000, step=step, start=start)

traceplot(trace)
forestplot(trace)
@@ -34,7 +34,7 @@ def test_plots_multidimensional():
with model as model:
h = np.diag(find_hessian(start))
step = Metropolis(model.vars, h)
- trace = sample(3000, step, start)
+ trace = sample(3000, step=step, start=start)

traceplot(trace)
plot_posterior(trace)

