diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 583494898b..f038c0f99c 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -11,6 +11,7 @@ - Added the `broadcast_distribution_samples` function that helps broadcasting arrays of drawn samples, taking into account the requested `size` and the inferred distribution shape. This sometimes is needed by distributions that call several `rvs` separately within their `random` method, such as the `ZeroInflatedPoisson` (Fix issue #3310). - The `Wald`, `Kumaraswamy`, `LogNormal`, `Pareto`, `Cauchy`, `HalfCauchy`, `Weibull` and `ExGaussian` distributions `random` method used a hidden `_random` function that was written with scalars in mind. This could potentially lead to artificial correlations between random draws. Added shape guards and broadcasting of the distribution samples to prevent this (Similar to issue #3310). - Added a fix to allow the imputation of single missing values of observed data, which previously would fail (Fix issue #3122). +- Fix for #3354. `draw_values` now adds the theano graph descendants of `TensorConstant` or `SharedVariables` to the named relationship nodes stack, only if these descendants are `ObservedRV` or `MultiObservedRV` instances. ### Deprecations diff --git a/pymc3/distributions/distribution.py b/pymc3/distributions/distribution.py index 248720101d..d206b75528 100644 --- a/pymc3/distributions/distribution.py +++ b/pymc3/distributions/distribution.py @@ -366,6 +366,14 @@ def draw_values(params, point=None, size=None): # ('Constants not allowed in param list', ...)` for # TensorConstant, and a `TypeError: Cannot use a shared # variable (...) as explicit input` for SharedVariable. 
+ # ObservedRV and MultiObservedRV instances are ViewOps + # of TensorConstants or SharedVariables, so we must add them + # to the stack or risk evaluating deterministics with the + # wrong values (issue #3354) + stack.extend([node for node in named_nodes_parents[next_] + if isinstance(node, (ObservedRV, + MultiObservedRV)) + and (node, size) not in drawn]) continue else: # If the node does not have a givens value, try to draw it. diff --git a/pymc3/tests/test_sampling.py b/pymc3/tests/test_sampling.py index cca560cad1..1febe0a34c 100644 --- a/pymc3/tests/test_sampling.py +++ b/pymc3/tests/test_sampling.py @@ -302,6 +302,37 @@ def test_model_not_drawable_prior(self): samples = pm.sample_posterior_predictive(trace, 50) assert samples['foo'].shape == (50, 200) + def test_deterministic_of_observed(self): + meas_in_1 = pm.theanof.floatX(2 + 4 * np.random.randn(100)) + meas_in_2 = pm.theanof.floatX(5 + 4 * np.random.randn(100)) + with pm.Model() as model: + mu_in_1 = pm.Normal('mu_in_1', 0, 1) + sigma_in_1 = pm.HalfNormal('sd_in_1', 1) + mu_in_2 = pm.Normal('mu_in_2', 0, 1) + sigma_in_2 = pm.HalfNormal('sd__in_2', 1) + + in_1 = pm.Normal('in_1', mu_in_1, sigma_in_1, observed=meas_in_1) + in_2 = pm.Normal('in_2', mu_in_2, sigma_in_2, observed=meas_in_2) + out_diff = in_1 + in_2 + pm.Deterministic('out', out_diff) + + trace = pm.sample(100) + ppc_trace = pm.trace_to_dataframe( + trace, + varnames=[n for n in trace.varnames + if n != 'out'] + ).to_dict('records') + ppc = pm.sample_posterior_predictive(model=model, + trace=ppc_trace, + samples=len(ppc_trace), + vars=(model.deterministics + + model.basic_RVs)) + + rtol = 1e-5 if theano.config.floatX == 'float64' else 1e-3 + assert np.allclose(ppc['in_1'] + ppc['in_2'], + ppc['out'], + rtol=rtol) + class TestSamplePPCW(SeededTest): def test_sample_posterior_predictive_w(self):