MAINT Revert order of tau and sd in all Normal likelihoods as well as add cov to multivariate normal.
twiecki committed Sep 27, 2016
1 parent e9ee56a commit c975da6
Showing 3 changed files with 83 additions and 27 deletions.
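
For orientation, a minimal usage sketch of the signatures as they stand after this commit (model and variable names are illustrative, not taken from the diff):

import numpy as np
import pymc3 as pm

with pm.Model():
    # sd now precedes tau in the keyword order; tau is still accepted by name
    x = pm.Normal('x', mu=0., sd=1.)
    h = pm.HalfNormal('h', sd=2.)

    # MvNormal now takes a covariance matrix directly, as an alternative to a precision matrix
    cov = np.array([[1.0, 0.5],
                    [0.5, 2.0]])
    y = pm.MvNormal('y', mu=np.zeros(2), cov=cov, shape=2)
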
43 changes: 22 additions & 21 deletions pymc3/distributions/continuous.py
@@ -174,13 +174,13 @@ class Normal(Continuous):
----------
mu : float
Mean.
tau : float
Precision (tau > 0).
sd : float
Standard deviation (sd > 0).
tau : float
Precision (tau > 0).
"""

def __init__(self, mu=0.0, tau=None, sd=None, *args, **kwargs):
def __init__(self, mu=0.0, sd=None, tau=None, *args, **kwargs):
super(Normal, self).__init__(*args, **kwargs)
self.mean = self.median = self.mode = self.mu = mu
self.tau, self.sd = get_tau_sd(tau=tau, sd=sd)
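
Both Normal and HalfNormal delegate to get_tau_sd, which resolves whichever of the two parameters was supplied. Roughly, it behaves like the following sketch (an illustration, not the library source):

def get_tau_sd_sketch(tau=None, sd=None):
    # convert between precision and standard deviation via tau = 1 / sd**2
    if tau is None and sd is None:
        tau, sd = 1., 1.
    elif tau is None:
        tau = sd ** -2
    elif sd is None:
        sd = tau ** -0.5
    else:
        raise ValueError("Can't pass both tau and sd")
    return tau, sd
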
@@ -219,21 +219,21 @@ class HalfNormal(PositiveContinuous):
Parameters
----------
tau : float
Precision (tau > 0).
sd : float
Standard deviation (sd > 0).
tau : float
Precision (tau > 0).
"""

def __init__(self, tau=None, sd=None, *args, **kwargs):
def __init__(self, sd=None, tau=None, *args, **kwargs):
super(HalfNormal, self).__init__(*args, **kwargs)
self.tau, self.sd = get_tau_sd(tau=tau, sd=sd)
self.mean = tt.sqrt(2 / (np.pi * self.tau))
self.variance = (1. - 2 / np.pi) / self.tau

def random(self, point=None, size=None, repeat=None):
tau = draw_values([self.tau], point=point)
return generate_samples(stats.halfnorm.rvs, loc=0., scale=tau**-0.5,
sd = draw_values([self.sd], point=point)
return generate_samples(stats.halfnorm.rvs, loc=0., scale=sd,
dist_shape=self.shape,
size=size)
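
The mean and variance expressions kept above can be spot-checked against scipy's half-normal (a quick sanity sketch, not part of the commit):

import numpy as np
from scipy import stats

sd = 2.0
tau = 1.0 / sd ** 2
assert np.isclose(np.sqrt(2 / (np.pi * tau)), stats.halfnorm(scale=sd).mean())
assert np.isclose((1. - 2 / np.pi) / tau, stats.halfnorm(scale=sd).var())
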

@@ -382,7 +382,7 @@ class Beta(UnitContinuous):
\alpha &= \mu \kappa \\
\beta &= (1 - \mu) \kappa
\text{where } \kappa = \frac{\mu(1-\mu)}{\sigma^2} - 1
Parameters
----------
@@ -554,15 +554,16 @@ class Lognormal(PositiveContinuous):
Scale parameter (tau > 0).
"""

def __init__(self, mu=0, tau=1, *args, **kwargs):
def __init__(self, mu=0, sd=None, tau=None, *args, **kwargs):
super(Lognormal, self).__init__(*args, **kwargs)

self.mu = mu
self.tau = tau
self.mean = tt.exp(mu + 1. / (2 * tau))
self.tau, self.sd = get_tau_sd(tau=tau, sd=sd)

self.mean = tt.exp(mu + 1. / (2 * self.tau))
self.median = tt.exp(mu)
self.mode = tt.exp(mu - 1. / tau)
self.variance = (tt.exp(1. / tau) - 1) * tt.exp(2 * mu + 1. / tau)
self.mode = tt.exp(mu - 1. / self.tau)
self.variance = (tt.exp(1. / self.tau) - 1) * tt.exp(2 * mu + 1. / self.tau)
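
The Lognormal moments rewritten above in terms of self.tau can likewise be cross-checked against scipy (illustrative only):

import numpy as np
from scipy import stats

mu, sd = 0.5, 0.25
tau = 1.0 / sd ** 2
ref = stats.lognorm(s=sd, scale=np.exp(mu))
assert np.isclose(np.exp(mu + 1. / (2 * tau)), ref.mean())
assert np.isclose((np.exp(1. / tau) - 1) * np.exp(2 * mu + 1. / tau), ref.var())
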

def _random(self, mu, tau, size=None):
samples = np.random.normal(size=size)
@@ -1199,7 +1200,7 @@ class VonMises(Continuous):
R"""
Univariate VonMises log-likelihood.
.. math::
f(x \mid \mu, \kappa) =
\frac{e^{\kappa\cos(x-\mu)}}{2\pi I_0(\kappa)}
where :math:`I_0` is the modified Bessel function of order 0.
@@ -1244,7 +1245,7 @@ class SkewNormal(Continuous):
R"""
Univariate skew-normal log-likelihood.
.. math::
f(x \mid \mu, \tau, \alpha) =
2 \Phi((x-\mu)\sqrt{\tau}\alpha) \phi(x,\mu,\tau)
======== ==========================================
Support :math:`x \in \mathbb{R}`
@@ -1266,13 +1267,13 @@ class SkewNormal(Continuous):
Alternative scale parameter (tau > 0).
alpha : float
Skewness parameter.
Notes
-----
When alpha=0 we recover the Normal distribution and mu becomes the mean,
tau the precision and sd the standard deviation. In the limit of alpha
approaching plus/minus infinity we get a half-normal distribution.
"""
def __init__(self, mu=0.0, sd=None, tau=None, alpha=1, *args, **kwargs):
super(SkewNormal, self).__init__(*args, **kwargs)
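
The alpha=0 statement in the SkewNormal notes above can be checked directly against scipy (a quick sketch, not part of this commit):

import numpy as np
from scipy import stats

x, mu, sd = 0.7, 0.0, 1.5
assert np.isclose(stats.skewnorm.logpdf(x, a=0, loc=mu, scale=sd),
                  stats.norm.logpdf(x, loc=mu, scale=sd))
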
46 changes: 41 additions & 5 deletions pymc3/distributions/multivariate.py
@@ -20,6 +20,41 @@
__all__ = ['MvNormal', 'MvStudentT', 'Dirichlet',
'Multinomial', 'Wishart', 'WishartBartlett', 'LKJCorr']

def get_tau_cov(mu, tau=None, cov=None):
"""
Find precision and covariance matrices
.. math::
T = \Sigma^{-1}
Parameters
----------
mu : array-like
tau : array-like, optional
cov : array-like, optional
Returns
-------
Returns tuple (tau, cov)
Notes
-----
If neither tau nor cov is provided, both default to the identity matrix.
"""
if tau is None:
if cov is None:
cov = np.eye(len(mu))
tau = np.eye(len(mu))
else:
tau = tt.nlinalg.matrix_inverse(cov)

else:
if cov is not None:
raise ValueError("Can't pass both tau and sd")
else:
cov = tt.nlinalg.matrix_inverse(tau)

return (tau, cov)
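
A rough illustration of the branches of get_tau_cov (hypothetical values; .eval() is needed where theano's matrix_inverse produces a symbolic result):

import numpy as np

mu = np.zeros(2)
sigma = np.array([[2.0, 0.3],
                  [0.3, 1.0]])

tau, cov = get_tau_cov(mu, cov=sigma)                 # tau is symbolic: inv(sigma)
assert np.allclose(tau.eval(), np.linalg.inv(sigma))

tau, cov = get_tau_cov(mu, tau=np.linalg.inv(sigma))  # cov recovered by inverting tau
assert np.allclose(cov.eval(), sigma)

tau, cov = get_tau_cov(mu)                            # neither given: both are identity
assert np.allclose(tau, np.eye(2)) and np.allclose(cov, np.eye(2))
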

class MvNormal(Continuous):
R"""
@@ -41,25 +76,26 @@ class MvNormal(Continuous):
----------
mu : array
Vector of means.
cov : array
Covariance matrix.
tau : array
Precision matrix.
"""

def __init__(self, mu, tau, *args, **kwargs):
def __init__(self, mu, cov=None, tau=None, *args, **kwargs):
super(MvNormal, self).__init__(*args, **kwargs)
self.mean = self.median = self.mode = self.mu = mu
self.tau = tau
self.tau, self.cov = get_tau_cov(mu, tau=tau, cov=cov)

def random(self, point=None, size=None):
mu, tau = draw_values([self.mu, self.tau], point=point)
mu, cov = draw_values([self.mu, self.cov], point=point)

def _random(mean, cov, size=None):
# FIXME: cov here is actually precision?
return stats.multivariate_normal.rvs(
mean, cov, None if size == mean.shape else size)

samples = generate_samples(_random,
mean=mu, cov=tau,
mean=mu, cov=cov,
dist_shape=self.shape,
broadcast_shape=mu.shape,
size=size)
21 changes: 20 additions & 1 deletion pymc3/tests/test_distributions.py
@@ -13,7 +13,7 @@
Geometric, Exponential, ExGaussian, Normal, Flat, LKJCorr, Wald,
ChiSquared, HalfNormal, DiscreteUniform, Bound, Uniform,
Binomial, Wishart, SkewNormal)
from ..distributions import continuous
from ..distributions import continuous, multivariate
from numpy import array, inf, log, exp
from numpy.testing import assert_almost_equal
import numpy.random as nr
@@ -495,6 +495,10 @@ def check_mvnormal(self, n):
self.pymc3_matches_scipy(MvNormal, Vector(R, n),
{'mu': Vector(R, n), 'tau': PdMatrix(n)}, normal_logpdf)

def check_mvnormal_cov(self, n):
self.pymc3_matches_scipy(MvNormal, Vector(R, n),
{'mu': Vector(R, n), 'cov': PdMatrix(n)}, normal_logpdf)

def test_mvt(self):
for n in [1, 2]:
yield self.check_mvt, n
@@ -583,6 +587,21 @@ def test_get_tau_sd(self):
sd = np.array([2])
assert_almost_equal(continuous.get_tau_sd(sd=sd), [1. / sd**2, sd])

def test_get_tau_cov(self):
cov = np.random.randn(3, 3)
cov = np.dot(cov, cov.T)
mu = np.ones(3)
tau, cov = multivariate.get_tau_cov(mu, cov=cov)
assert_almost_equal(tau.eval(),
np.linalg.inv(cov)
)

tau, cov = multivariate.get_tau_cov(mu)
assert_almost_equal(cov,
np.eye(3)
)


def test_ex_gaussian(self):
# Log probabilities calculated using the dexGAUS function from the R package gamlss.
# See e.g., doi: 10.1111/j.1467-9876.2005.00510.x, or
