Skip to content
Browse files

More cosmetic changes in GMM.

  • Loading branch information...
1 parent e76eec5 commit 1f9f96577927c59fd8ae0f5ae57795e9117f0a79 @fabianp fabianp committed Feb 23, 2011
Showing with 19 additions and 12 deletions.
  1. +10 −7 scikits/learn/mixture.py
  2. +9 −5 scikits/learn/tests/test_mixture.py
View
17 scikits/learn/mixture.py
@@ -79,32 +79,35 @@ def lmvnpdf(obs, means, covars, cvtype='diag'):
return lmvnpdf_dict[cvtype](obs, means, covars)
-def sample_gaussian(mean, covar, cvtype='diag', n=1):
+def sample_gaussian(mean, covar, cvtype='diag', n_samples=1):
"""Generate random samples from a Gaussian distribution.
Parameters
----------
mean : array_like, shape (n_features,)
Mean of the distribution.
- covars : array_like
+
+ covar : array_like, optional
Covariance of the distribution. The shape depends on `cvtype`:
scalar if 'spherical',
(D) if 'diag',
(D, D) if 'tied', or 'full'
- cvtype : string
+
+ cvtype : string, optional
Type of the covariance parameters. Must be one of
'spherical', 'tied', 'diag', 'full'. Defaults to 'diag'.
- n : int
- Number of samples to generate.
+
+ n_samples : int, optional
+ Number of samples to generate. Defaults to 1.
Returns
-------
obs : array, shape (n_features, n_samples)
Randomly generated sample
"""
ndim = len(mean)
- rand = np.random.randn(ndim, n)
- if n == 1:
+ rand = np.random.randn(ndim, n_samples)
+ if n_samples == 1:
rand.shape = (ndim,)
if cvtype == 'spherical':
View
14 scikits/learn/tests/test_mixture.py
@@ -62,22 +62,26 @@ def test_sample_gaussian():
mu = np.random.randint(10) * np.random.rand(n_features)
cv = (np.random.rand(n_features) + 1.0) ** 2
- samples = mixture.sample_gaussian(mu, cv, cvtype='diag', n=n_samples)
+ samples = mixture.sample_gaussian(
+ mu, cv, cvtype='diag', n_samples=n_samples)
assert np.allclose(samples.mean(axis), mu, atol=0.3)
assert np.allclose(samples.var(axis), cv, atol=0.5)
# the same for spherical covariances
cv = (np.random.rand() + 1.0) ** 2
- samples = mixture.sample_gaussian(mu, cv, cvtype='spherical', n=n_samples)
+ samples = mixture.sample_gaussian(
+ mu, cv, cvtype='spherical', n_samples=n_samples)
assert np.allclose(samples.mean(axis), mu, atol=0.3)
- assert np.allclose(samples.var(axis), np.repeat(cv, n_features), atol=0.5)
+ assert np.allclose(
+ samples.var(axis), np.repeat(cv, n_features), atol=0.5)
# and for full covariances
A = np.random.randn(n_features, n_features)
cv = np.dot(A.T, A) + np.eye(n_features)
- samples = mixture.sample_gaussian(mu, cv, cvtype='full', n=n_samples)
+ samples = mixture.sample_gaussian(
+ mu, cv, cvtype='full', n_samples=n_samples)
assert np.allclose(samples.mean(axis), mu, atol=0.3)
assert np.allclose(np.cov(samples), cv, atol=1.)
@@ -219,7 +223,7 @@ def test_train(self, params='wmc'):
g._covars = 20 * self.covars[self.cvtype]
# Create a training set by sampling from the predefined distribution.
- train_obs = g.rvs(n=100)
+ train_obs = g.rvs(n_samples=100)
g.fit(train_obs, n_iter=0, init_params=params)

0 comments on commit 1f9f965

Please sign in to comment.
Something went wrong with that request. Please try again.