1 change: 1 addition & 0 deletions RELEASE-NOTES.md
@@ -10,6 +10,7 @@
- `GLM.from_formula` and `LinearComponent.from_formula` can extract variables from the calling scope. Customizable via the new `eval_env` argument. Fixing #3382.
- Added the `distributions.shape_utils` module with functions used to help broadcast samples drawn from distributions using the `size` keyword argument.
- Used `numpy.vectorize` in `distributions.distribution._compile_theano_function`. This enables `sample_prior_predictive` and `sample_posterior_predictive` to ask for tuples of samples instead of just integers. This fixes issue #3422.
- Allow the `DiscreteUniform` distribution (with `lower=0`) to be used with the `CategoricalGibbsMetropolis` step method; see the usage sketch below.

### Maintenance
- Fixed an issue in `model_graph` that caused construction of the graph of the model for rendering to hang: replaced a search over the powerset of the nodes with a breadth-first search over the nodes. Fix for #3458.
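The new release-note entry above is the user-facing summary of this PR. Below is a minimal usage sketch of what it enables, written against the pymc3 3.x API; the model, bounds, and observed values are illustrative assumptions, not taken from the PR:

```python
import pymc3 as pm

with pm.Model():
    # lower must be 0; the step method treats `rate` as a categorical
    # variable with upper + 1 possible states.
    rate = pm.DiscreteUniform('rate', lower=0, upper=20)
    pm.Poisson('counts', mu=rate, observed=[7, 9, 4])
    step = pm.CategoricalGibbsMetropolis([rate])
    trace = pm.sample(2000, tune=1000, step=step)
```

Before this change, constructing the step method for `rate` raised a `ValueError`, because `DiscreteUniform` was neither categorical nor binary.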
10 changes: 9 additions & 1 deletion pymc3/step_methods/metropolis.py
@@ -400,9 +400,15 @@ def __init__(self, vars, proposal='uniform', order='random', model=None):
                k = draw_values([distr.k])[0]
            elif isinstance(distr, pm.Bernoulli) or (v.dtype in pm.bool_types):
                k = 2
            elif isinstance(distr, pm.DiscreteUniform):
                # A DiscreteUniform with lower == 0 has upper + 1 categories (0..upper).
                k = draw_values([distr.upper])[0] + 1
                if draw_values([distr.lower])[0] != 0:
                    raise ValueError('Parameter lower must be 0 to use DiscreteUniform' +
                                     ' with CategoricalGibbsMetropolis.')
            else:
                raise ValueError('All variables must be categorical or binary' +
                                 'for CategoricalGibbsMetropolis')
                                 ' or DiscreteUniform for CategoricalGibbsMetropolis')
            start = len(dimcats)
            dimcats += [(dim, k) for dim in range(start, start + v.dsize)]

@@ -489,6 +495,8 @@ def competence(var):
            return Competence.COMPATIBLE
        elif isinstance(distribution, pm.Bernoulli) or (var.dtype in pm.bool_types):
            return Competence.COMPATIBLE
        elif (isinstance(distribution, pm.DiscreteUniform) and
              draw_values([distribution.lower])[0] == 0):
            return Competence.COMPATIBLE
        return Competence.INCOMPATIBLE


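The `__init__` hunk above computes the category count as `upper + 1` and rejects any `DiscreteUniform` whose `lower` is not 0, and the `competence` hunk reports the step method as compatible only in that zero-based case. A short sketch of both paths, again assuming the pymc3 3.x API with illustrative variable names:

```python
import pymc3 as pm

# Zero-based DiscreteUniform: accepted, handled as a categorical with
# upper + 1 = 10 states.
with pm.Model():
    ok = pm.DiscreteUniform('ok', lower=0, upper=9)
    pm.CategoricalGibbsMetropolis([ok])

# Non-zero lower: the new guard raises ValueError at step construction.
with pm.Model():
    shifted = pm.DiscreteUniform('shifted', lower=1, upper=10)
    try:
        pm.CategoricalGibbsMetropolis([shifted])
    except ValueError as err:
        print(err)
```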
26 changes: 26 additions & 0 deletions pymc3/tests/test_step.py
@@ -36,6 +36,8 @@
    Categorical,
    Beta,
    HalfNormal,
    DiscreteUniform,
    Poisson,
)

from numpy.testing import assert_array_almost_equal
@@ -1047,3 +1049,27 @@ def test_sampler_stats(self):
]
)
assert (trace.model_logp == model_logp_).all()


class TestDiscreteUniform:

    def test_DiscreteUniform(self, lower=0, upper=200000000,
                             obs=5000000, draws=20000):
        """Test that a DiscreteUniform variable can be sampled with the
        CategoricalGibbsMetropolis step method."""
        obs = theano.shared(obs)
        with Model() as model2:
            x = DiscreteUniform('x', lower, upper - 1)
            sfs_obs = Poisson('sfs_obs', mu=x, observed=obs)

        with model2:
            step = CategoricalGibbsMetropolis([x])
            trace = sample(draws, tune=0, step=step)
        return trace

    def test_bad_lower(self):
        with pytest.raises(ValueError):
            self.test_DiscreteUniform(lower=1, upper=200000001,
                                      obs=5000000, draws=20000)