Commit

Merge 9ee2d6a into 4f67d63
lucianopaz committed Jul 22, 2019
2 parents 4f67d63 + 9ee2d6a commit b584f6f
Showing 4 changed files with 177 additions and 25 deletions.
1 change: 1 addition & 0 deletions RELEASE-NOTES.md
@@ -11,6 +11,7 @@

### Maintenance
- Moved math operations out of `Rice`, `TruncatedNormal`, `Triangular` and `ZeroInflatedNegativeBinomial` `random` methods. Math operations on values returned by `draw_values` may not broadcast well, so all `size`-aware broadcasting is left to `generate_samples`. Fixes [#3481](https://github.com/pymc-devs/pymc3/issues/3481) and [#3508](https://github.com/pymc-devs/pymc3/issues/3508)
- Wrapped `DensityDist.rand` with `generate_samples` to make it aware of the distribution's shape. Added control flow attributes that allow it to behave as in earlier versions. Fixes [#3553](https://github.com/pymc-devs/pymc3/issues/3553)
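
The broadcasting note above can be made concrete with a small sketch. This is illustrative only: the import path is assumed to be `pymc3.distributions.distribution`, and the function and parameter names below are made up rather than taken from the shipped implementation.

```python
import numpy as np
import scipy.stats as stats

# Assumed import path; generate_samples lives in pymc3/distributions/distribution.py.
from pymc3.distributions.distribution import generate_samples


def normal_like_random(mu, sigma, dist_shape, size=None):
    # No manual math or broadcasting on the drawn parameter values here:
    # all size-aware broadcasting is delegated to generate_samples.
    return generate_samples(
        stats.norm.rvs, loc=mu, scale=sigma,
        dist_shape=dist_shape, size=size,
    )


samples = normal_like_random(np.zeros(3), 1.0, dist_shape=(3,), size=10)
print(samples.shape)  # expected (10, 3): size is prepended to dist_shape
```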

## PyMC3 3.7 (May 29 2019)

98 changes: 96 additions & 2 deletions pymc3/distributions/distribution.py
@@ -211,16 +211,110 @@ class DensityDist(Distribution):
"""

def __init__(self, logp, shape=(), dtype=None, testval=0, random=None, *args, **kwargs):
def __init__(
self,
logp,
shape=(),
dtype=None,
testval=0,
random=None,
wrap_random_with_dist_shape=True,
check_shape_in_random=True,
*args,
**kwargs
):
"""
Parameters
----------
logp: callable
A callable that has the following signature ``logp(value)`` and
returns a theano tensor that represents the distribution's log
probability density.
shape: tuple (Optional): defaults to `()`
The shape of the distribution. The default value indicates a scalar.
If the distribution is *not* scalar-valued, the programmer should pass
a value here.
dtype: None, str (Optional)
The dtype of the distribution.
testval: number or array (Optional)
The ``testval`` of the RV's tensor that follow the ``DensityDist``
distribution.
random: None or callable (Optional)
If ``None``, no random method is attached to the ``DensityDist``
instance.
If a callable, it is used as the distribution's ``random`` method.
The behavior of this callable can be altered with the
``wrap_random_with_dist_shape`` parameter.
wrap_random_with_dist_shape: bool (Optional)
If ``True``, the provided ``random`` callable is passed through
``generate_samples`` to make the random number generator aware of
the ``DensityDist`` instance's ``shape``.
If ``False``, it is used exactly as it was provided.
check_shape_in_random: bool (Optional)
If ``True``, the shape of the random samples generated by the
``random`` method is checked against the expected return shape. This
check is only performed if ``wrap_random_with_dist_shape`` is ``False``.
args, kwargs: (Optional)
These are passed to the parent class' ``__init__``.

Note
----
If ``wrap_random_with_dist_shape`` is ``True``, the supplied ``random``
callable is wrapped with the :func:`~generate_samples` function, and the
distribution's shape is passed to :func:`~generate_samples` as the
``dist_shape`` parameter.
"""
if dtype is None:
dtype = theano.config.floatX
super().__init__(shape, dtype, testval, *args, **kwargs)
self.logp = logp
self.rand = random
self.wrap_random_with_dist_shape = wrap_random_with_dist_shape
self.check_shape_in_random = check_shape_in_random

def random(self, *args, **kwargs):
if self.rand is not None:
return self.rand(*args, **kwargs)
if self.wrap_random_with_dist_shape:
samples = generate_samples(
self.rand, dist_shape=self.shape, *args, **kwargs
)
else:
samples = self.rand(*args, **kwargs)
if self.check_shape_in_random:
try:
size = args[1]
except IndexError:
size = kwargs.get("size", None)
expected_shape = (
self.shape
if size is None else
to_tuple(size) + self.shape
)
if expected_shape != samples.shape:
raise RuntimeError(
"DensityDist encountered a shape inconsistency "
"while drawing samples using the supplied random "
"function. Was expecting to get samples of shape "
"{expected} but got {got} instead.\n"
"Whenever possible wrap_random_with_dist_shape = True "
"is recommended.\n"
"Be aware that the random callable provided as the "
"DensityDist random method cannot "
"adapt to shape changes in the distribution's "
"shape, which sometimes are necessary for sampling "
"when the model uses pymc3.Data or theano shared "
"tensors, or when the DensityDist has observed "
"values.\n"
"This check can be disabled by passing "
"check_shape_in_random=False when the DensityDist "
"is initialized.".
format(
expected=expected_shape,
got=samples.shape,
)
)
return samples
else:
raise ValueError("Distribution was not passed any random method "
"Define a custom random method and pass it as kwarg random")
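A minimal usage sketch of the two new arguments, condensed from the tests below; the model context and the variable names (`wrapped`, `unwrapped`) are illustrative, not part of the change itself.

```python
import numpy as np
import pymc3 as pm

with pm.Model():
    mu = pm.Normal('mu', 0, 1)
    normal_dist = pm.Normal.dist(mu, 1)

    # Default behavior: the supplied random callable is wrapped with
    # generate_samples, so it becomes aware of the DensityDist's shape
    # (here inferred from the observed values).
    pm.DensityDist(
        'wrapped',
        normal_dist.logp,
        observed=np.random.randn(100),
        random=normal_dist.random,
    )

    # Opting out of the wrapping: the callable must then return samples of
    # the expected shape on its own, otherwise the shape check above raises
    # a RuntimeError (unless check_shape_in_random=False is also passed).
    pm.DensityDist(
        'unwrapped',
        normal_dist.logp,
        observed=np.random.randn(100),
        random=pm.Normal.dist(mu, 1, shape=100).random,
        wrap_random_with_dist_shape=False,
    )
```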
101 changes: 79 additions & 22 deletions pymc3/tests/test_distributions_random.py
@@ -922,28 +922,85 @@ def test_mixture_random_shape():
assert ppc['like3'].shape == (200, 20)


def test_density_dist_with_random_sampleable():
with pm.Model() as model:
mu = pm.Normal('mu', 0, 1)
normal_dist = pm.Normal.dist(mu, 1)
pm.DensityDist('density_dist', normal_dist.logp, observed=np.random.randn(100), random=normal_dist.random)
trace = pm.sample(100)

samples = 500
ppc = pm.sample_posterior_predictive(trace, samples=samples, model=model, size=100)
assert len(ppc['density_dist']) == samples


def test_density_dist_without_random_not_sampleable():
with pm.Model() as model:
mu = pm.Normal('mu', 0, 1)
normal_dist = pm.Normal.dist(mu, 1)
pm.DensityDist('density_dist', normal_dist.logp, observed=np.random.randn(100))
trace = pm.sample(100)

samples = 500
with pytest.raises(ValueError):
pm.sample_posterior_predictive(trace, samples=samples, model=model, size=100)
class TestDensityDist:
def test_density_dist_with_random_sampleable(self):
with pm.Model() as model:
mu = pm.Normal('mu', 0, 1)
normal_dist = pm.Normal.dist(mu, 1)
obs = pm.DensityDist('density_dist', normal_dist.logp, observed=np.random.randn(100), random=normal_dist.random)
trace = pm.sample(100)

samples = 500
size = 100
ppc = pm.sample_posterior_predictive(trace, samples=samples, model=model, size=size)
assert ppc['density_dist'].shape == (samples, size) + obs.distribution.shape

def test_density_dist_with_random_sampleable_failure(self):
with pm.Model() as model:
mu = pm.Normal('mu', 0, 1)
normal_dist = pm.Normal.dist(mu, 1)
pm.DensityDist(
'density_dist',
normal_dist.logp,
observed=np.random.randn(100),
random=normal_dist.random,
wrap_random_with_dist_shape=False
)
trace = pm.sample(100)

samples = 500
with pytest.raises(RuntimeError):
pm.sample_posterior_predictive(trace, samples=samples, model=model, size=100)

def test_density_dist_with_random_sampleable_hidden_error(self):
with pm.Model() as model:
mu = pm.Normal('mu', 0, 1)
normal_dist = pm.Normal.dist(mu, 1)
obs = pm.DensityDist(
'density_dist',
normal_dist.logp,
observed=np.random.randn(100),
random=normal_dist.random,
wrap_random_with_dist_shape=False,
check_shape_in_random=False
)
trace = pm.sample(100)

samples = 500
ppc = pm.sample_posterior_predictive(trace, samples=samples, model=model)
assert len(ppc['density_dist']) == samples
assert ((samples,) + obs.distribution.shape) != ppc['density_dist'].shape
print(obs.distribution.shape, ppc['density_dist'].shape)

def test_density_dist_with_random_sampleable_handcrafted_success(self):
with pm.Model() as model:
mu = pm.Normal('mu', 0, 1)
normal_dist = pm.Normal.dist(mu, 1)
rvs = pm.Normal.dist(mu, 1, shape=100).random
obs = pm.DensityDist(
'density_dist',
normal_dist.logp,
observed=np.random.randn(100),
random=rvs,
wrap_random_with_dist_shape=False
)
trace = pm.sample(100)

samples = 500
size = 100
ppc = pm.sample_posterior_predictive(trace, samples=samples, model=model, size=size)
assert ppc['density_dist'].shape == (samples, size) + obs.distribution.shape

def test_density_dist_without_random_not_sampleable(self):
with pm.Model() as model:
mu = pm.Normal('mu', 0, 1)
normal_dist = pm.Normal.dist(mu, 1)
pm.DensityDist('density_dist', normal_dist.logp, observed=np.random.randn(100))
trace = pm.sample(100)

samples = 500
with pytest.raises(ValueError):
pm.sample_posterior_predictive(trace, samples=samples, model=model, size=100)


class TestNestedRandom(SeededTest):
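For reference, the shape bookkeeping behind the assertions in `TestDensityDist` above, using the concrete numbers from these tests:

```python
# Values taken from the tests above: 100 observed points, size=100, samples=500.
samples, size, dist_shape = 500, 100, (100,)

# sample_posterior_predictive draws `samples` sets of `size` replicates each,
# so the asserted array shape is (samples, size) + dist_shape.
expected = (samples, size) + dist_shape
print(expected)  # (500, 100, 100)
```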
2 changes: 1 addition & 1 deletion pymc3/tests/test_models_utils.py
@@ -41,7 +41,7 @@ def test_dict_input(self):
m, l = utils.any_to_tensor_and_labels(self.data.to_dict('list'))
self.assertMatrixLabels(m, l, mt=self.data[l].values, lt=l)

inp = {k: tt.as_tensor_variable(v) for k, v in self.data.to_dict('series').items()}
inp = {k: tt.as_tensor_variable(v.values) for k, v in self.data.to_dict('series').items()}
m, l = utils.any_to_tensor_and_labels(inp)
self.assertMatrixLabels(m, l, mt=self.data[l].values, lt=l)

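The one-line change above passes the underlying numpy arrays (`Series.values`) to `tt.as_tensor_variable` instead of the pandas `Series` objects themselves. A small sketch of the pattern, with a made-up frame standing in for `self.data`:

```python
import pandas as pd
import theano.tensor as tt

# Hypothetical data; the real test builds its frame in the fixture.
data = pd.DataFrame({'x': [1.0, 2.0], 'y': [3.0, 4.0]})

# Convert each column's numpy array, not the Series wrapper, to a tensor.
inp = {k: tt.as_tensor_variable(v.values) for k, v in data.to_dict('series').items()}
```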
