-
-
Notifications
You must be signed in to change notification settings - Fork 2k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
6 changed files
with
449 additions
and
139 deletions.
There are no files selected for viewing
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1 +1,3 @@ | ||
from . import cov | ||
from . import mean | ||
from .gp import GP, sample_gp |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,128 @@ | ||
import numpy as np | ||
from scipy import stats | ||
from tqdm import tqdm | ||
|
||
from theano.tensor.nlinalg import matrix_inverse, det | ||
import theano.tensor as tt | ||
import theano | ||
|
||
from .mean import Zero | ||
from ..distributions import MvNormal, Continuous, draw_values, generate_samples | ||
from ..model import modelcontext | ||
|
||
|
||
__all__ = ['GP', 'sample_gp'] | ||
|
||
class GP(Continuous):
    """Gaussian process.

    Parameters
    ----------
    mean_func : Mean
        Mean function of Gaussian process
    cov_func : Covariance
        Covariance function of Gaussian process
    sigma : scalar or array
        Observation noise (defaults to zero)
    """
    def __init__(self, mean_func=None, cov_func=None, sigma=0, *args, **kwargs):
        super(GP, self).__init__(*args, **kwargs)

        # Default to a zero mean function when none is supplied.
        if mean_func is None:
            self.M = Zero()
        else:
            self.M = mean_func

        # A covariance function is mandatory; there is no sensible default.
        # (Fixed message: previously said "GPP" instead of "GP".)
        if cov_func is None:
            raise ValueError('A covariance function must be specified for GP')
        self.K = cov_func

        self.sigma = sigma

    def random(self, point=None, size=None, **kwargs):
        """Draw prior samples of the GP at inputs ``X`` (required keyword arg)."""
        X = kwargs.pop('X')
        # Observation noise enters as sigma**2 on the covariance diagonal.
        mu, cov = draw_values([self.M(X).squeeze(),
                               self.K(X) + np.eye(X.shape[0]) * self.sigma**2],
                              point=point)

        def _random(mean, cov, size=None):
            # Pass size=None when it merely repeats the mean's shape.
            return stats.multivariate_normal.rvs(
                mean, cov, None if size == mean.shape else size)

        samples = generate_samples(_random,
                                   mean=mu, cov=cov,
                                   dist_shape=self.shape,
                                   broadcast_shape=mu.shape,
                                   size=size)
        return samples

    def logp(self, X, Y):
        """Log-probability of observations ``Y`` at inputs ``X``."""
        mu = self.M(X)
        Sigma = self.K(X) + tt.eye(X.shape[0]) * self.sigma**2

        # Delegate the density computation to the multivariate normal.
        return MvNormal.dist(mu, Sigma).logp(Y)
|
||
|
||
def sample_gp(trace, gp, X_values, samples=None, obs_noise=True, model=None, random_seed=None, progressbar=True):
    """Generate samples from a posterior Gaussian process.

    Parameters
    ----------
    trace : backend, list, or MultiTrace
        Trace generated from MCMC sampling.
    gp : Gaussian process object
        The GP variable to sample from.
    X_values : array
        Grid of values at which to sample GP.
    samples : int
        Number of posterior predictive samples to generate. Defaults to the
        length of `trace`
    obs_noise : bool
        Flag for including observation noise in sample. Defaults to True.
    model : Model
        Model used to generate `trace`. Optional if in `with` context manager.
    random_seed : integer >= 0
        Random number seed for sampling.
    progressbar : bool
        Flag for showing progress bar.

    Returns
    -------
    Array of samples from posterior GP evaluated at X_values.
    """
    model = modelcontext(model)

    if samples is None:
        samples = len(trace)

    # Explicit None check so that a seed of 0 is honored rather than
    # silently ignored by truthiness.
    if random_seed is not None:
        np.random.seed(random_seed)

    # Indices of trace points to condition each posterior draw on.
    if progressbar:
        indices = tqdm(np.random.randint(0, len(trace), samples), total=samples)
    else:
        indices = np.random.randint(0, len(trace), samples)

    K = gp.distribution.K

    # Recover the observed (X, Y) data attached to the GP's observed RV.
    data = [v for v in model.observed_RVs if v.name == gp.name][0].data

    X = data['X']
    Y = data['Y']
    Z = X_values

    # Cross- and test-covariance blocks for the conditional MVN.
    S_xz = K(X, Z)
    S_zz = K(Z)
    if obs_noise:
        # Include sigma**2 observation noise on the training covariance.
        S_inv = matrix_inverse(K(X) + tt.eye(X.shape[0]) * gp.distribution.sigma**2)
    else:
        S_inv = matrix_inverse(K(X))

    # Posterior mean
    m_post = tt.dot(tt.dot(S_xz.T, S_inv), Y)
    # Posterior covariance
    S_post = S_zz - tt.dot(tt.dot(S_xz.T, S_inv), S_xz)

    gp_post = MvNormal.dist(m_post, S_post, shape=Z.shape[0])

    # One conditional draw per randomly-selected trace point.
    samples = [gp_post.random(point=trace[idx]) for idx in indices]

    return np.array(samples)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,86 @@ | ||
import theano.tensor as tt | ||
|
||
__all__ = ['Zero', 'Constant'] | ||
|
||
class Mean(object):
    """
    Base class for mean functions
    """

    def __call__(self, X):
        R"""
        Evaluate the mean function.

        Parameters
        ----------
        X : The training inputs to the mean function.
        """
        # Subclasses must provide the actual evaluation.
        raise NotImplementedError

    def __add__(self, other):
        # ``m1 + m2`` builds the pointwise sum of two mean functions.
        summed = Add(self, other)
        return summed

    def __mul__(self, other):
        # ``m1 * m2`` builds the pointwise product of two mean functions.
        product = Prod(self, other)
        return product
|
||
class Zero(Mean):
    """Mean function that is identically zero everywhere."""

    def __call__(self, X):
        # Build a zero tensor matching X's shape, then drop singleton axes.
        zeros = tt.zeros(X.shape, dtype='float32')
        return zeros.squeeze()
|
||
class Constant(Mean):
    """
    Constant mean function for Gaussian process.

    Parameters
    ----------
    c : variable, array or integer
        Constant mean value
    """

    def __init__(self, c=0):
        Mean.__init__(self)
        # Constant value returned (broadcast) at every input point.
        self.c = c

    def __call__(self, X):
        # Tile the constant across X's shape, then drop singleton axes.
        tiled = tt.tile(tt.stack(self.c), X.shape)
        return tiled.squeeze()
|
||
class Linear(Mean):
    """
    Linear mean function for Gaussian process.

    Parameters
    ----------
    coeffs : variables
        Linear coefficients
    intercept : variable, array or integer
        Intercept for linear function (Defaults to zero)
    """

    def __init__(self, coeffs, intercept=0):
        Mean.__init__(self)
        self.b = intercept
        self.A = coeffs

    def __call__(self, X):
        # Affine map: X @ A + b.
        return tt.dot(X, self.A) + self.b
|
||
|
||
class Add(Mean):
    """Pointwise sum of two mean functions (produced by ``Mean.__add__``)."""

    def __init__(self, first_mean, second_mean):
        Mean.__init__(self)
        # The two operands; evaluated independently and summed on call.
        self.m1 = first_mean
        self.m2 = second_mean

    def __call__(self, X):
        left = self.m1(X)
        right = self.m2(X)
        return tt.add(left, right)
|
||
|
||
class Prod(Mean):
    """Pointwise product of two mean functions (produced by ``Mean.__mul__``)."""

    def __init__(self, first_mean, second_mean):
        Mean.__init__(self)
        # The two operands; evaluated independently and multiplied on call.
        self.m1 = first_mean
        self.m2 = second_mean

    def __call__(self, X):
        left = self.m1(X)
        right = self.m2(X)
        return tt.mul(left, right)
|