From 18a173e55b5817c29d57c447b5c1a0927ceb1899 Mon Sep 17 00:00:00 2001
From: Thomas Wiecki
Date: Tue, 27 Sep 2016 10:02:24 +0200
Subject: [PATCH 1/2] MAINT Reverse order of tau and sd in all normal
 likelihoods and add cov to multivariate normal.

---
 pymc3/distributions/continuous.py        | 67 ++++++++++++++++--------
 pymc3/distributions/multivariate.py      | 58 ++++++++++++++++----
 pymc3/tests/models.py                    |  9 ++--
 pymc3/tests/test_distributions.py        | 21 +++++++-
 pymc3/tests/test_distributions_random.py | 12 ++---
 5 files changed, 125 insertions(+), 42 deletions(-)

diff --git a/pymc3/distributions/continuous.py b/pymc3/distributions/continuous.py
index 93ea9265c9..048044262c 100644
--- a/pymc3/distributions/continuous.py
+++ b/pymc3/distributions/continuous.py
@@ -174,18 +174,40 @@ class Normal(Continuous):
     ----------
     mu : float
         Mean.
-    tau : float
-        Precision (tau > 0).
     sd : float
         Standard deviation (sd > 0).
+    tau : float
+        Precision (tau > 0).
     """

-    def __init__(self, mu=0.0, tau=None, sd=None, *args, **kwargs):
-        super(Normal, self).__init__(*args, **kwargs)
+    def __init__(self, *args, **kwargs):
+        # FIXME: in order to warn when Normal('x', 0, .1) is called with a
+        # positional tau, we have to parse args and kwargs manually. After
+        # the deprecation period we should revert to a plain calling
+        # signature.
+
+        if len(args) == 1:
+            mu = args[0]
+            sd = kwargs.pop('sd', None)
+            tau = kwargs.pop('tau', None)
+        elif len(args) == 2:
+            warnings.warn(('The order of positional arguments to Normal() '
+                           'has changed. The new signature is: '
+                           'Normal(name, mu, sd) instead of Normal(name, mu, tau).'),
+                          DeprecationWarning)
+            mu, sd = args
+            tau = kwargs.pop('tau', None)
+        else:
+            mu = kwargs.pop('mu', 0.)
+            sd = kwargs.pop('sd', None)
+            tau = kwargs.pop('tau', None)
+
         self.mean = self.median = self.mode = self.mu = mu
         self.tau, self.sd = get_tau_sd(tau=tau, sd=sd)
         self.variance = 1. / self.tau

+        super(Normal, self).__init__(**kwargs)
+
     def random(self, point=None, size=None, repeat=None):
         mu, tau, sd = draw_values([self.mu, self.tau, self.sd],
                                   point=point)
@@ -219,21 +241,21 @@ class HalfNormal(PositiveContinuous):

     Parameters
     ----------
-    tau : float
-        Precision (tau > 0).
     sd : float
         Standard deviation (sd > 0).
+    tau : float
+        Precision (tau > 0).
     """

-    def __init__(self, tau=None, sd=None, *args, **kwargs):
+    def __init__(self, sd=None, tau=None, *args, **kwargs):
         super(HalfNormal, self).__init__(*args, **kwargs)
         self.tau, self.sd = get_tau_sd(tau=tau, sd=sd)
         self.mean = tt.sqrt(2 / (np.pi * self.tau))
         self.variance = (1. - 2 / np.pi) / self.tau

     def random(self, point=None, size=None, repeat=None):
-        tau = draw_values([self.tau], point=point)
-        return generate_samples(stats.halfnorm.rvs, loc=0., scale=tau**-0.5,
+        sd = draw_values([self.sd], point=point)
+        return generate_samples(stats.halfnorm.rvs, loc=0., scale=sd,
                                 dist_shape=self.shape,
                                 size=size)

@@ -382,7 +404,7 @@ class Beta(UnitContinuous):
        \alpha &= \mu \kappa \\
        \beta  &= (1 - \mu) \kappa

-       \text{where } \kappa = \frac{\mu(1-\mu)}{\sigma^2} - 1 
+       \text{where } \kappa = \frac{\mu(1-\mu)}{\sigma^2} - 1

     Parameters
     ----------
@@ -554,15 +576,16 @@ class Lognormal(PositiveContinuous):
         Scale parameter (tau > 0).
     """

-    def __init__(self, mu=0, tau=1, *args, **kwargs):
+    def __init__(self, mu=0, sd=None, tau=None, *args, **kwargs):
         super(Lognormal, self).__init__(*args, **kwargs)

         self.mu = mu
-        self.tau = tau
-        self.mean = tt.exp(mu + 1. / (2 * tau))
+        self.tau, self.sd = get_tau_sd(tau=tau, sd=sd)
+
+        self.mean = tt.exp(mu + 1. / (2 * self.tau))
         self.median = tt.exp(mu)
-        self.mode = tt.exp(mu - 1. / tau)
-        self.variance = (tt.exp(1. / tau) - 1) * tt.exp(2 * mu + 1. / tau)
+        self.mode = tt.exp(mu - 1. / self.tau)
+        self.variance = (tt.exp(1. / self.tau) - 1) * tt.exp(2 * mu + 1. / self.tau)

     def _random(self, mu, tau, size=None):
         samples = np.random.normal(size=size)
@@ -1199,7 +1222,7 @@ class VonMises(Continuous):
     R"""
     Univariate VonMises log-likelihood.

     .. math::
-        f(x \mid \mu, \kappa) = 
+        f(x \mid \mu, \kappa) =
             \frac{e^{\kappa\cos(x-\mu)}}{2\pi I_0(\kappa)}

     where :math:`I_0` is the modified Bessel function of order 0.
@@ -1244,7 +1267,7 @@ class SkewNormal(Continuous):
     R"""
     Univariate skew-normal log-likelihood.

     .. math::
-       f(x \mid \mu, \tau, \alpha) = 
+       f(x \mid \mu, \tau, \alpha) =
        2 \Phi((x-\mu)\sqrt{\tau}\alpha) \phi(x,\mu,\tau)

     ========  ==========================================
     Support   :math:`x \in \mathbb{R}`
@@ -1266,13 +1289,13 @@ class SkewNormal(Continuous):
         Alternative scale parameter (tau > 0).
     alpha : float
         Skewness parameter.
-    
+
     Notes
     -----
-    When alpha=0 we recover the Normal distribution and mu becomes the mean, 
-    tau the precision and sd the standard deviation. In the limit of alpha 
-    approaching plus/minus infinite we get a half-normal distribution. 
-    
+    When alpha=0 we recover the Normal distribution and mu becomes the mean,
+    tau the precision and sd the standard deviation. In the limit of alpha
+    approaching plus/minus infinity we get a half-normal distribution.
+
     """
     def __init__(self, mu=0.0, sd=None, tau=None, alpha=1, *args, **kwargs):
         super(SkewNormal, self).__init__(*args, **kwargs)
diff --git a/pymc3/distributions/multivariate.py b/pymc3/distributions/multivariate.py
index 2b39397370..cbf0e87812 100755
--- a/pymc3/distributions/multivariate.py
+++ b/pymc3/distributions/multivariate.py
@@ -20,6 +20,41 @@
 __all__ = ['MvNormal', 'MvStudentT', 'Dirichlet',
            'Multinomial', 'Wishart', 'WishartBartlett', 'LKJCorr']

+def get_tau_cov(mu, tau=None, cov=None):
+    R"""
+    Find precision and covariance matrices
+
+    .. math::
+        \tau = \Sigma^{-1}
+
+    Parameters
+    ----------
+    mu : array-like
+    tau : array-like, optional
+    cov : array-like, optional
+
+    Returns
+    -------
+    Tuple (tau, cov)
+
+    Notes
+    -----
+    If neither tau nor cov is provided, identity matrices are
+    returned for both.
+    """
+    if tau is None:
+        if cov is None:
+            cov = np.eye(len(mu))
+            tau = np.eye(len(mu))
+        else:
+            tau = tt.nlinalg.matrix_inverse(cov)
+    else:
+        if cov is not None:
+            raise ValueError("Can't pass both tau and cov")
+        else:
+            cov = tt.nlinalg.matrix_inverse(tau)
+
+    return (tau, cov)

 class MvNormal(Continuous):
     R"""
     Parameters
     ----------
     mu : array
         Vector of means.
-    tau : array
+    cov : array, optional
+        Covariance matrix.
+    tau : array, optional
         Precision matrix.
     """

-    def __init__(self, mu, tau, *args, **kwargs):
+    def __init__(self, mu, cov=None, tau=None, *args, **kwargs):
         super(MvNormal, self).__init__(*args, **kwargs)
+        warnings.warn(('The calling signature of MvNormal() has changed. The new signature is:\n'
+                       'MvNormal(name, mu, cov) instead of MvNormal(name, mu, tau). '
+                       'You do not need to explicitly invert the covariance matrix anymore.'),
+                      DeprecationWarning)
         self.mean = self.median = self.mode = self.mu = mu
-        self.tau = tau
+        self.tau, self.cov = get_tau_cov(mu, tau=tau, cov=cov)

     def random(self, point=None, size=None):
-        mu, tau = draw_values([self.mu, self.tau], point=point)
+        mu, cov = draw_values([self.mu, self.cov], point=point)

         def _random(mean, cov, size=None):
-            # FIXME: cov here is actually precision?
             return stats.multivariate_normal.rvs(
                 mean, cov, None if size == mean.shape else size)

         samples = generate_samples(_random,
-                                   mean=mu, cov=tau,
+                                   mean=mu, cov=cov,
                                    dist_shape=self.shape,
                                    broadcast_shape=mu.shape,
                                    size=size)
@@ -352,9 +392,9 @@ def __init__(self, n, V, *args, **kwargs):
         warnings.warn('The Wishart distribution can currently not be used '
                       'for MCMC sampling. The probability of sampling a '
                       'symmetric matrix is basically zero. Instead, please '
-                      'use the LKJCorr prior. For more information on the '
-                      'issues surrounding the Wishart see here: '
-                      'https://github.com/pymc-devs/pymc3/issues/538.',
+                      'use WishartBartlett or, better yet, LKJCorr. '
+                      'For more information on the issues surrounding the '
+                      'Wishart see here: https://github.com/pymc-devs/pymc3/issues/538.',
                       UserWarning)
         self.n = n
         self.p = p = V.shape[0]
diff --git a/pymc3/tests/models.py b/pymc3/tests/models.py
index b1c04da24d..20c4d750d9 100644
--- a/pymc3/tests/models.py
+++ b/pymc3/tests/models.py
@@ -9,7 +9,7 @@ def simple_model():
     mu = -2.1
     tau = 1.3
     with Model() as model:
-        Normal('x', mu, tau, shape=2, testval=[.1] * 2)
+        Normal('x', mu, tau=tau, shape=2, testval=[.1] * 2)

     return model.test_point, model, (mu, tau ** -1)

@@ -18,7 +18,7 @@ def multidimensional_model():
     mu = -2.1
     tau = 1.3
     with Model() as model:
-        Normal('x', mu, tau, shape=(3, 2), testval=.1 * np.ones((3, 2)))
+        Normal('x', mu, tau=tau, shape=(3, 2), testval=.1 * np.ones((3, 2)))

     return model.test_point, model, (mu, tau ** -1)

@@ -34,7 +34,7 @@ def simple_2model():
     tau = 1.3
     p = .4
     with Model() as model:
-        x = pm.Normal('x', mu, tau, testval=.1)
+        x = pm.Normal('x', mu, tau=tau, testval=.1)
         pm.Deterministic('logx', log(x))
         pm.Bernoulli('y', p)
     return model.test_point, model
@@ -48,7 +48,8 @@ def mv_simple():
                   [1., -0.05, 5.5]])
     tau = np.dot(p, p.T)
     with pm.Model() as model:
-        pm.MvNormal('x', pm.constant(mu), pm.constant(tau), shape=3, testval=np.array([.1, 1., .8]))
+        pm.MvNormal('x', pm.constant(mu), tau=pm.constant(tau),
+                    shape=3, testval=np.array([.1, 1., .8]))
     H = tau
     C = np.linalg.inv(H)
     return model.test_point, model, (mu, C)
diff --git a/pymc3/tests/test_distributions.py b/pymc3/tests/test_distributions.py
index 66d1fb38d7..92a66c79e3 100644
--- a/pymc3/tests/test_distributions.py
+++ b/pymc3/tests/test_distributions.py
@@ -13,7 +13,7 @@
      Geometric, Exponential, ExGaussian, Normal, Flat, LKJCorr, Wald,
      ChiSquared, HalfNormal, DiscreteUniform, Bound, Uniform, Binomial,
      Wishart, SkewNormal)
-from ..distributions import continuous
+from ..distributions import continuous, multivariate
 from numpy import array, inf, log, exp
 from numpy.testing import assert_almost_equal
 import numpy.random as nr
@@ -495,6 +495,10 @@ def check_mvnormal(self, n):
         self.pymc3_matches_scipy(MvNormal, Vector(R, n),
                                  {'mu': Vector(R, n), 'tau': PdMatrix(n)}, normal_logpdf)

+    def check_mvnormal_cov(self, n):
+        self.pymc3_matches_scipy(MvNormal, Vector(R, n),
+                                 {'mu': Vector(R, n), 'cov': PdMatrix(n)}, normal_logpdf)
+
     def test_mvt(self):
         for n in [1, 2]:
             yield self.check_mvt, n
@@ -583,6 +587,21 @@ def test_get_tau_sd(self):
         sd = np.array([2])
         assert_almost_equal(continuous.get_tau_sd(sd=sd),
                             [1. / sd**2, sd])

+    def test_get_tau_cov(self):
+        cov = np.random.randn(3, 3)
+        cov = np.dot(cov, cov.T)
+        mu = np.ones(3)
+        tau, cov = multivariate.get_tau_cov(mu, cov=cov)
+        assert_almost_equal(tau.eval(),
+                            np.linalg.inv(cov))
+
+        tau, cov = multivariate.get_tau_cov(mu)
+        assert_almost_equal(cov,
+                            np.eye(3))
+
     def test_ex_gaussian(self):
         # Log probabilities calculated using the dexGAUS function from the R package gamlss.
         # See e.g., doi: 10.1111/j.1467-9876.2005.00510.x, or
diff --git a/pymc3/tests/test_distributions_random.py b/pymc3/tests/test_distributions_random.py
index 627daf3f5b..cf2500f472 100644
--- a/pymc3/tests/test_distributions_random.py
+++ b/pymc3/tests/test_distributions_random.py
@@ -257,7 +257,7 @@ def check(self, dist, **kwargs):

     def test_normal(self):
         self.check(Normal, mu=0., tau=1.)
-    
+
     def test_skew_normal(self):
         self.check(SkewNormal, mu=0., sd=1., alpha=5.)

@@ -362,7 +362,7 @@ def check(self, dist, **kwargs):

     def test_normal(self):
         self.check(Normal, mu=self.zeros, tau=self.ones)
-    
+
     def test_skew_normal(self):
         self.check(SkewNormal, mu=self.zeros, sd=self.ones, alpha=self.ones * 5)

@@ -481,7 +481,7 @@ def check(self, dist, **kwargs):

     def test_normal(self):
         self.check(Normal, mu=self.zeros, tau=self.ones)
-    
+
     def test_skew_normal(self):
         self.check(SkewNormal, mu=self.zeros, sd=self.ones, alpha=self.ones * 5)

@@ -766,10 +766,10 @@ def ref_rand(size, c):
         pymc3_random_discrete(ConstantDist, {'c': I}, ref_rand=ref_rand)

     def test_mv_normal(self):
-        def ref_rand(size, mu, tau):
-            return st.multivariate_normal.rvs(mean=mu, cov=tau, size=size)
+        def ref_rand(size, mu, cov):
+            return st.multivariate_normal.rvs(mean=mu, cov=cov, size=size)
         for n in [2, 3]:
-            pymc3_random(MvNormal, {'mu': Vector(R, n), 'tau': PdMatrix(n)},
+            pymc3_random(MvNormal, {'mu': Vector(R, n), 'cov': PdMatrix(n)},
                          size=100, valuedomain=Vector(R, n), ref_rand=ref_rand)

     def test_mv_t(self):

From 3d1a702c79915b7741230715731ae0cffa2005de Mon Sep 17 00:00:00 2001
From: Thomas Wiecki
Date: Tue, 27 Sep 2016 12:44:44 +0200
Subject: [PATCH 2/2] DOC Update examples to use tau explicitly where they
 relied on positional arguments before. DOC Add new calling signatures to
 release notes.

---
 RELEASE-NOTES.md                            | 48 ++++++++++++++-------
 docs/source/notebooks/dp_mix.ipynb          |  7 +--
 docs/source/notebooks/pmf-pymc.ipynb        |  5 ++-
 docs/source/notebooks/rugby_analytics.ipynb |  9 ++--
 pymc3/examples/LKJ_correlation.py           |  4 +-
 pymc3/examples/gelman_bioassay.py           |  4 +-
 pymc3/examples/lasso_missing.py             |  2 +-
 7 files changed, 49 insertions(+), 30 deletions(-)

diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md
index 9f634b8922..15eeb83353 100644
--- a/RELEASE-NOTES.md
+++ b/RELEASE-NOTES.md
@@ -3,9 +3,11 @@
 We are proud and excited to release the first stable version of PyMC3, the product of more than [5 years](https://github.com/pymc-devs/pymc3/commit/85c7e06b6771c0d99cbc09cb68885cda8f7785cb) of ongoing development and contributions from over 80 individuals.

 PyMC3 is a Python module for Bayesian modeling which focuses on modern Bayesian computational methods, primarily gradient-based (Hamiltonian) MCMC sampling and variational inference. Models are specified in Python, which allows for great flexibility. The main technological difference in PyMC3 relative to previous versions is the reliance on Theano for the computational backend, rather than on Fortran extensions.
+### New features
+
 Since the beta release last year, the following improvements have been implemented:

-* Added `variational` submodule, which features the automatic differentiation variational inference (ADVI) fitting method. Much of this work was due to the efforts of Taku Yoshioka, and important guidance was provided by the Stan team (specifically Alp Kucukelbir and Daniel Lee).
+* Added `variational` submodule, which features the automatic differentiation variational inference (ADVI) fitting method. Also supports mini-batch ADVI for large data sets. Much of this work was due to the efforts of Taku Yoshioka, and important guidance was provided by the Stan team (specifically Alp Kucukelbir and Daniel Lee).

 * Added model checking utility functions, including leave-one-out (LOO) cross-validation, BPIC, WAIC, and DIC.

@@ -21,15 +23,29 @@ Since the beta release last year, the following improvements have been implemented:

 * Refactored test suite for better efficiency.

-* Added von Mises, zero-inflated negative binomial, and Lewandowski, Kurowicka and Joe (LKJ) distributions. 
+* Added von Mises, zero-inflated negative binomial, and Lewandowski, Kurowicka and Joe (LKJ) distributions.

 * Adopted `joblib` for managing parallel computation of chains.

 * Added contributor guidelines, contributor code of conduct and governance document.

+### Deprecations
+
+* The order of the `tau` and `sd` arguments was swapped for distributions of the
+  normal family: `Normal()`, `Lognormal()`, and `HalfNormal()`.
+  - Old: `Normal(name, mu, tau)`
+  - New: `Normal(name, mu, sd)` (supplying keyword arguments is unaffected).
+
+* The calling signature of `MvNormal()` changed:
+  - Old: `MvNormal(name, mu, tau)`
+  - New: `MvNormal(name, mu, cov)` (supplying keyword arguments is unaffected).
+
 We on the PyMC3 core team would like to thank everyone for contributing and now feel that this is ready for the big time. We look forward to hearing about all the cool stuff you use PyMC3 for, and look forward to continued development on the package.

-## Contributors
+### Contributors

 A Kuz
A. Flaxman
@@ -38,48 +54,48 @@
 Alexey Goldin
 Anand Patil
 Andrea Zonca
 Andreas Klostermann
-Andres Asensio Ramos 
+Andres Asensio Ramos
 Andrew Clegg
-Anjum48 
+Anjum48
 AustinRochford
 Benjamin Edwards
 Boris Avdeev
 Brian Naughton
-Byron Smith 
+Byron Smith
 Chad Heyne
 Chris Fonnesbeck
-Colin 
+Colin
 Corey Farwell
 David Huard
 David Huard
 David Stück
 DeliciousHair
-Dustin Tran 
+Dustin Tran
 Eigenblutwurst
 Gideon Wulfsohn
 Gil Raphaelli
 Gogs
-Ilan Man 
+Ilan Man
 Imri Sofer
 Jake Biesinger
 James Webber
 John McDonnell
 John Salvatier
-Jordi Diaz 
+Jordi Diaz
 Jordi Warmenhoven
 Karlson Pfannschmidt
 Kyle Bishop
 Kyle Meyer
-Lin Xiao 
+Lin Xiao
 Mack Sweeney
 Matthew Emmett
-Maxim 
+Maxim
 Michael Gallaspy
 Nick
 Osvaldo Martin
 Patricio Benavente
 Peadar Coyle (springcoil)
-Raymond Roberts 
+Raymond Roberts
 Rodrigo Benenson
 Sergei Lebedev
 Skipper Seabold
@@ -88,8 +104,8 @@
 The Gitter Badger
 Thomas Kluyver
 Thomas Wiecki
 Tobias Knuth
-Volodymyr 
-Volodymyr Kazantsev 
+Volodymyr
+Volodymyr Kazantsev
 Wes McKinney
 Zach Ploskey
 akuz
diff --git a/docs/source/notebooks/dp_mix.ipynb b/docs/source/notebooks/dp_mix.ipynb
index e156bc7a1d..abfa4829cf 100644
--- a/docs/source/notebooks/dp_mix.ipynb
+++ b/docs/source/notebooks/dp_mix.ipynb
@@ -580,8 +580,8 @@
     "\n",
     "    tau = pm.Gamma('tau', 1., 1., shape=K)\n",
     "    lambda_ = pm.Uniform('lambda', 0, 5, shape=K)\n",
-    "    mu = pm.Normal('mu', 0, lambda_ * tau, shape=K)\n",
-    "    obs = pm.Normal('obs', mu[component], lambda_[component] * tau[component],\n",
+    "    mu = pm.Normal('mu', 0, tau=lambda_ * tau, shape=K)\n",
+    "    obs = pm.Normal('obs', mu[component], tau=lambda_[component] * tau[component],\n",
     "                   observed=old_faithful_df.std_waiting.values)"
    ]
   },
@@ -1188,8 +1188,9 @@
    }
   ],
  "metadata": {
+  "anaconda-cloud": {},
   "kernelspec": {
-   "display_name": "Python 3",
+   "display_name": "Python [default]",
    "language": "python",
    "name": "python3"
   },
diff --git a/docs/source/notebooks/pmf-pymc.ipynb b/docs/source/notebooks/pmf-pymc.ipynb
index bc05f5f8b0..70f09c2eb2 100644
--- a/docs/source/notebooks/pmf-pymc.ipynb
+++ b/docs/source/notebooks/pmf-pymc.ipynb
@@ -1529,8 +1529,9 @@
    }
   ],
  "metadata": {
+  "anaconda-cloud": {},
   "kernelspec": {
-   "display_name": "Python 3",
+   "display_name": "Python [default]",
    "language": "python",
    "name": "python3"
   },
@@ -1544,7 +1545,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.5.1"
+   "version": "3.5.2"
   }
  },
 "nbformat": 4,
diff --git a/docs/source/notebooks/rugby_analytics.ipynb b/docs/source/notebooks/rugby_analytics.ipynb
index 11f101a068..a78b983d5a 100644
--- a/docs/source/notebooks/rugby_analytics.ipynb
+++ b/docs/source/notebooks/rugby_analytics.ipynb
@@ -243,10 +243,10 @@
     "model = pm.Model()\n",
     "with pm.Model() as model:\n",
     "    # global model parameters\n",
-    "    home = pm.Normal('home', 0, .0001)\n",
+    "    home = pm.Normal('home', 0, tau=.0001)\n",
     "    tau_att = pm.Gamma('tau_att', .1, .1)\n",
     "    tau_def = pm.Gamma('tau_def', .1, .1)\n",
-    "    intercept = pm.Normal('intercept', 0, .0001)\n",
+    "    intercept = pm.Normal('intercept', 0, tau=.0001)\n",
     "    \n",
     "    # team-specific model parameters\n",
     "    atts_star = pm.Normal(\"atts_star\", \n",
@@ -464,8 +464,9 @@
    }
  ],
  "metadata": {
+  "anaconda-cloud": {},
   "kernelspec": {
-   "display_name": "Python 3",
+   "display_name": "Python [default]",
    "language": "python",
    "name": "python3"
   },
@@ -479,7 +480,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.5.1"
+   "version": "3.5.2"
   }
  },
 "nbformat": 4,
diff --git a/pymc3/examples/LKJ_correlation.py b/pymc3/examples/LKJ_correlation.py
index 03b29481a5..3d87839728 100644
--- a/pymc3/examples/LKJ_correlation.py
+++ b/pymc3/examples/LKJ_correlation.py
@@ -34,7 +34,7 @@

 with Model() as model:

-    mu = Normal('mu', mu=0, tau=1 ** -2, shape=n_var)
+    mu = Normal('mu', mu=0, sd=1, shape=n_var)

     # We can specify separate priors for sigma and the correlation matrix:
     sigma = Uniform('sigma', shape=n_var)
@@ -44,7 +44,7 @@

     cov_matrix = tt.diag(sigma).dot(corr_matrix.dot(tt.diag(sigma)))

-    like = MvNormal('likelihood', mu=mu, tau=inv(cov_matrix), observed=dataset)
+    like = MvNormal('likelihood', mu=mu, cov=cov_matrix, observed=dataset)


 def run(n=1000):
diff --git a/pymc3/examples/gelman_bioassay.py b/pymc3/examples/gelman_bioassay.py
index e45f3c784f..bc0456a2a6 100644
--- a/pymc3/examples/gelman_bioassay.py
+++ b/pymc3/examples/gelman_bioassay.py
@@ -9,8 +9,8 @@

 with Model() as model:
     # Logit-linear model parameters
-    alpha = Normal('alpha', 0, 0.01)
-    beta = Normal('beta', 0, 0.01)
+    alpha = Normal('alpha', 0, tau=0.01)
+    beta = Normal('beta', 0, tau=0.01)

     # Calculate probabilities of death
     theta = Deterministic('theta', invlogit(alpha + beta * dose))
diff --git a/pymc3/examples/lasso_missing.py b/pymc3/examples/lasso_missing.py
index a786a05796..d0704d7855 100644
--- a/pymc3/examples/lasso_missing.py
+++ b/pymc3/examples/lasso_missing.py
@@ -36,7 +36,7 @@
                       beta[4] * age + beta[5] * mother_imp + beta[6] * early_ident)

     observed_score = Normal(
-        'observed_score', expected_score, s ** -2, observed=score)
+        'observed_score', expected_score, sd=s, observed=score)

 with model:
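---

A minimal sketch of the calling conventions these two patches establish; the
model and variable names below are illustrative only and do not come from
this diff:

    import numpy as np
    import pymc3 as pm

    with pm.Model() as model:
        # The third positional argument of Normal() is now the standard
        # deviation rather than the precision.
        x = pm.Normal('x', 0., 1.)        # sd = 1
        y = pm.Normal('y', 0., tau=4.)    # precision still available as a keyword
        z = pm.HalfNormal('z', sd=2.)

        # MvNormal() now takes the covariance matrix directly; no manual
        # inversion is needed (tau remains available as a keyword).
        cov = np.array([[1.0, 0.5],
                        [0.5, 2.0]])
        mv = pm.MvNormal('mv', mu=np.zeros(2), cov=cov, shape=2)

    # Calling Normal with two positional parameters, e.g. pm.Normal('w', 0, .1),
    # still runs but emits a DeprecationWarning pointing at the new order.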