Skip to content

Commit

Permalink
more restructuring
Browse files Browse the repository at this point in the history
  • Loading branch information
MarcusMNoack committed Mar 22, 2024
1 parent bd09870 commit eb23c47
Show file tree
Hide file tree
Showing 6 changed files with 112 additions and 74 deletions.
6 changes: 3 additions & 3 deletions fvgp/gp2.py
Original file line number Diff line number Diff line change
Expand Up @@ -251,8 +251,8 @@ def __init__(
###init likelihood instance#############
########################################
# likelihood needs hps, noise function
self.likelihood = GPlikelihood(hyperparameters=self.prior.hyperparameters,
noise_variances=noise_variances,
self.likelihood = GPlikelihood(self.data,
hyperparameters=self.prior.hyperparameters,
gp_noise_function=gp_noise_function,
gp_noise_function_grad=gp_noise_function_grad,
ram_economy=ram_economy,
Expand All @@ -272,7 +272,7 @@ def __init__(
gp_kernel_function,
gp_mean_function,
gp_noise_function,
init_hyperparameters=self.prior.hyperparameters,
init_hyperparameters=self.prior.hyperparameters, ##random init_hyperparameters will not be available in the prior_obj
hyperparameter_bounds=hyperparameter_bounds)

##########################################
Expand Down
2 changes: 1 addition & 1 deletion fvgp/gp_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def __init__(self, x_data, y_data, noise_variances=None):
self.noise_variances = noise_variances
self.point_number = len(self.x_data)

def update_data(self, x_data_new, y_data_new, noise_variances_new=None, append=True):
def update(self, x_data_new, y_data_new, noise_variances_new=None, append=True):
assert isinstance(x_data_new, np.ndarray) or isinstance(x_data_new, list)
assert isinstance(y_data_new, np.ndarray) and np.ndim(y_data_new) == 1
assert ((isinstance(noise_variances_new, np.ndarray) and np.ndim(noise_variances_new) == 1)
Expand Down
44 changes: 31 additions & 13 deletions fvgp/gp_likelihood.py
Original file line number Diff line number Diff line change
@@ -1,29 +1,41 @@
import numpy as np
import warnings


class GPlikelihood: # pragma: no cover
def __init__(self, hyperparameters=None,
noise_variances=None,
def __init__(self, data_obj,
hyperparameters=None,
gp_noise_function=None,
gp_noise_function_grad=None,
ram_economy=False,
gp2Scale=False,
online=False):

assert isinstance(hyperparameters, np.ndarray) and np.ndim(hyperparameters) == 1
self.data_obj = data_obj
self.x_data = self.data_obj.x_data
self.y_data = self.data_obj.y_data
self.noise_variances = self.data_obj.noise_variances
assert self.noise_variances is None or isinstance(self.noise_variances, np.ndarray)

if isinstance(self.noise_variances, np.ndarray):
assert np.ndim(self.noise_variances) == 1
assert all(self.noise_variances > 0.0)

self.gp2Scale = gp2Scale

if noise_variances is not None and callable(gp_noise_function):
warnings.warn("Noise function and measurement noise provided. noise_variances set to None.", stacklevel=2)
noise_variances = None
if self.noise_variances is not None and callable(gp_noise_function):
raise Exception("Noise function and measurement noise provided. Decide which one to use.", stacklevel=2)
if callable(gp_noise_function):
self.noise_function = gp_noise_function
elif noise_variances is not None:
self.noise_function = None
elif self.noise_variances is not None:
self.noise_function = self._measured_noise_function
else:
warnings.warn(
"No noise function or measurement noise provided. Noise variances will be set to 1% of mean(y_data).",
stacklevel=2)
self.noise_function = self._default_noise_function

if callable(gp_noise_function_grad):
self.noise_function_grad = gp_noise_function_grad
elif callable(gp_noise_function):
Expand All @@ -37,18 +49,24 @@ def __init__(self, hyperparameters=None,
else:
self.noise_function_grad = self._default_dnoise_dh

self.V = self.noise_function(self.x_data, hyperparameters, self)
self.online = online

##################################################################################


def update(self, hyperparameters=None):
    """Re-sync cached data views from the shared data object and recompute V.

    Copies x_data, y_data and noise_variances from ``self.data_obj`` (which the
    owner mutates on new measurements), then re-evaluates the noise covariance.

    Parameters
    ----------
    hyperparameters : np.ndarray, optional
        Forwarded to the noise function. NOTE(review): defaults to None and is
        passed straight into a possibly user-supplied noise function -- confirm
        all callers provide a real hyperparameter vector.
    """
    self.x_data = self.data_obj.x_data
    self.y_data = self.data_obj.y_data
    self.noise_variances = self.data_obj.noise_variances
    # V is the (dense or sparse) noise covariance used in KV = K + V.
    self.V = self.noise_function(self.x_data, hyperparameters, self)

def _default_noise_function(self, x, hyperparameters, gp_obj):
    """Fallback noise model: i.i.d. variance at 1% of mean(|y_data|).

    Returns a sparse noise covariance in gp2Scale mode, otherwise a dense
    diagonal matrix of shape (len(x), len(x)).

    FIX: the block contained two back-to-back copies of the same if/else
    (diff residue); the second copy was unreachable and has been removed.
    """
    noise = np.ones((len(x))) * (np.mean(abs(self.y_data)) / 100.0)
    if self.gp2Scale:
        # NOTE(review): self.gp2Scale_obj is not assigned anywhere visible in
        # this class -- presumably injected by the owning GP object; confirm.
        return self.gp2Scale_obj.calculate_sparse_noise_covariance(noise)
    return np.diag(noise)

def _measured_noise_function(self, x, hyperparameters, gp_obj):
    """Noise model backed by user-measured variances (``self.noise_variances``).

    BUG FIX: the original gp2Scale branch referenced an undefined local
    ``noise`` (guaranteed NameError); the measured variances are the only
    noise data available here, so they are passed instead.
    """
    if self.gp2Scale:
        # TODO(review): confirm the gp2Scale builder name/signature -- the
        # default-noise path calls calculate_sparse_noise_covariance instead.
        return self.gp2Scale_obj.calculate_sparse_default_noise_covariance(self.noise_variances)
    return np.diag(self.noise_variances)

def _default_dnoise_dh(self, x, hps, gp_obj):
gr = np.zeros((len(hps), len(x), len(x)))
Expand Down
76 changes: 45 additions & 31 deletions fvgp/gp_marginal_density.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,53 +2,67 @@


class GPMarginalDensity:
def __init__(self,
             data_obj,
             prior_obj,
             likelihood_obj,
             store_inv=False):
    """Cache data/prior/likelihood handles and compute the marginal-density
    linear algebra (KV, KVinvY, logdet, factorization, optional inverse).

    FIX: as scraped, the block carried two interleaved ``def __init__`` headers
    (old and new diff sides), which is a syntax error; this is the new form.

    Parameters
    ----------
    data_obj : object with x_data / y_data
    prior_obj : object providing K and prior_mean_vector
    likelihood_obj : object providing the noise covariance V
    store_inv : bool
        Whether downstream linear algebra should keep an explicit KV inverse.
    """
    self.data_obj = data_obj
    self.prior_obj = prior_obj
    self.likelihood_obj = likelihood_obj
    self.store_inv = store_inv
    self.K = prior_obj.K
    # k / kk are the rank-update blocks; populated on update(), not at init.
    self.k = None
    self.kk = None
    self.V = likelihood_obj.V
    self.y_data = data_obj.y_data
    # Centered targets: y - prior mean, the vector all solves act on.
    self.y_mean = data_obj.y_data - prior_obj.prior_mean_vector

    self.KV, self.KVinvY, self.KVlogdet, self.factorization_obj, self.KVinv = self._compute_GPpriorV()

def update(self):
    """Refresh cached K, k, kk, V and y statistics from the owned objects,
    then recompute the marginal-density linear algebra via _update_GPpriorV.

    BUG FIX: the original referenced bare names ``prior_obj``,
    ``likelihood_obj`` and ``data_obj`` which are not in scope inside this
    method (NameError at first call) -- they are instance attributes.
    """
    self.K = self.prior_obj.K
    self.k = self.prior_obj.k
    self.kk = self.prior_obj.kk
    self.V = self.likelihood_obj.V
    self.y_data = self.data_obj.y_data
    self.y_mean = self.data_obj.y_data - self.prior_obj.prior_mean_vector
    self.KV, self.KVinvY, self.KVlogdet, self.factorization_obj, self.KVinv = self._update_GPpriorV()

def _compute_GPpriorV(self):
    """Assemble KV = K + V and compute KVinvY, logdet(KV) and factorization.

    Reads K from the prior object and V from the likelihood object; the
    centered targets self.y_mean were set in __init__/update.

    FIX: as scraped, old and new diff bodies were interleaved (two ``def``
    headers, two bodies); this is the reconstructed new version.
    """
    # get K + V
    K = self.prior_obj.K
    V = self.likelihood_obj.V
    # check if shapes are correct
    assert K.shape == V.shape
    KV = K + V

    # get Kinv/KVinvY, LU, Chol, logdet(KV)
    # BUG FIX(review): the original passed an undefined local ``calc_inv``;
    # the store_inv preference recorded in __init__ is the grounded choice
    # here -- confirm against _compute_gp_linalg's contract.
    KVinvY, KVlogdet, factorization_obj, KVinv = self._compute_gp_linalg(
        self.y_mean, KV, calc_inv=self.store_inv)
    return KV, KVinvY, KVlogdet, factorization_obj, KVinv

##################################################################################

def _update_GPpriorV(self):
    """Rank-update variant of _compute_GPpriorV for appended data.

    In online mode, reuses the prior's block pieces k (cross-covariance) and
    kk (new-block covariance) for an incremental update; otherwise redoes the
    full linear algebra on KV.

    FIX: as scraped, old and new diff bodies were interleaved; this is the
    reconstructed new version.
    """
    # get K and the rank-update blocks
    K = self.prior_obj.K
    k = self.prior_obj.k    # TODO(review): comment says this should include the off-diagonal part of V
    kk = self.prior_obj.kk  # TODO(review): comment says this should include the new-block part of V
    V = self.likelihood_obj.V
    # check if shapes are correct
    assert K.shape == V.shape

    # get K + V
    KV = K + V
    # NOTE(review): self.online_mode is not assigned anywhere visible in this
    # class -- confirm where it is set before this runs.
    if self.online_mode is True:
        KVinvY, KVlogdet, factorization_obj, KVinv = self._update_gp_linalg(
            self.y_mean, k, kk)
    else:
        KVinvY, KVlogdet, factorization_obj, KVinv = self._compute_gp_linalg(
            self.y_mean, KV)
    return KV, KVinvY, KVlogdet, factorization_obj, KVinv

##################################################################################
def _compute_gp_linalg(self, vec, KV, calc_inv=False):
Expand All @@ -66,7 +80,7 @@ def _update_gp_linalg(self, vec, k, kk):
X = self._inv(kk - C @ self.KVinv @ B)
F = -self.KVinv @ k @ X
KVinv = np.block([[self.KVinv + self.KVinv @ B @ X @ C @ self.KVinv, F],
[F.T, X]])
[F.T, X]])
factorization_obj = ("Inv", None)
KVinvY = KVinv @ vec
KVlogdet = self.KVlogdet + self._logdet(kk - k.T @ self.KVinv @ k)
Expand Down
5 changes: 3 additions & 2 deletions fvgp/gp_posterior.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
class GPosterior: # pragma: no cover
def __init__(self, prior_obj, data_obj):
assert isinstance(KVinvY, np.ndarray)
def __init__(self, marginal_density_obj, data_obj):
assert isinstance(marginal_density_obj.KVinvY, np.ndarray)

self.KV = prior_obj.KV
self.factorization_obj = prior_obj.factorization_obj
self.prior_obj = prior_obj
Expand Down
53 changes: 29 additions & 24 deletions fvgp/gp_prior.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,54 +81,59 @@ def __init__(self, data_obj,
else:
self.dm_dh = self._default_dm_dh

self.prior_mean_vector, self.K = self.compute_prior(self.x_data, self.hyperparameters)
self.prior_mean_vector, self.K = self.compute_prior(self.x_data)
self.k = None
self.kk = None

def update(self, x_data, x_new):
    """Extend the prior to newly appended points ``x_new``.

    Delegates to update_prior, which now reads self.hyperparameters itself.
    BUG FIX: the superseded call passed a bare ``hyperparameters`` name that
    is not in scope here; the scraped span also interleaved the removed old
    compute_prior/update_prior bodies, which are dropped (their replacements
    appear below in the file).
    """
    self.prior_mean_vector, self.K, self.k, self.kk = self.update_prior(x_data, x_new)

def set_hyperparameters(self, hyperparameters):
    """Replace the hyperparameter vector used by kernel and mean evaluations."""
    self.hyperparameters = hyperparameters

def compute_prior(self, x_data):
    """Evaluate the prior at ``x_data``.

    Returns
    -------
    tuple
        (prior mean vector, prior covariance matrix).
    """
    mean_vec = self.compute_mean(x_data)
    cov = self.compute_K(x_data)
    # Sanity-check dimensionality before these feed into linear algebra.
    assert np.ndim(mean_vec) == 1
    assert np.ndim(cov) == 2
    return mean_vec, cov

def update_prior(self, x_data, x_new):
    """Extend the prior mean and covariance to appended points ``x_new``.

    BUG FIXES: (1) the original asserted on an undefined name
    ``prior_mean_vec`` -- the freshly computed ``m`` is what the check is
    about; (2) a stale duplicated ``return self.m, self.K`` (diff residue)
    executed first and returned the wrong, 2-element tuple.

    Returns
    -------
    tuple
        (m, K, k, kk): extended mean, extended covariance, cross block,
        new-block covariance.
    """
    m = self.update_mean(x_new)
    K, k, kk = self.update_K(x_data, x_new)
    assert np.ndim(m) == 1
    assert np.ndim(K) == 2
    return m, K, k, kk

def compute_K(self, x):
    """Compute the prior covariance matrix K = kernel(x, x) using the stored
    hyperparameters.

    FIX: as scraped, the old signature (taking ``hyperparameters``) and old
    kernel call were interleaved with the new ones -- two consecutive ``def``
    lines, a syntax error; this is the reconstructed new version.
    """
    # if gp2Scale:
    # else:
    K = self.kernel(x, x, self.hyperparameters, self)
    return K

def update_K(self, x_data, x_new):
    """Grow the covariance matrix with a new block of points.

    Builds the cross block k = cov(x_data, x_new) and the new-block
    covariance kk = cov(x_new, x_new), then assembles the extended matrix
    [[K, k], [k.T, kk]].

    FIX: as scraped, old (hyperparameters-taking) and new lines were
    interleaved; this is the reconstructed new version returning (K, k, kk).
    NOTE(review): ``self._compute_K`` is not visible in this chunk -- confirm
    its signature matches these two-argument calls.
    """
    # if gp2Scale: ...
    # else:
    k = self._compute_K(x_data, x_new)
    kk = self._compute_K(x_new, x_new)
    K = np.block([
        [self.K, k],
        [k.T, kk]
    ])
    return K, k, kk

def compute_mean(self, x_data):
    """Compute the prior mean vector at ``x_data`` using the stored
    hyperparameters.

    FIXES: as scraped, old and new ``def`` lines were interleaved (syntax
    error); the docstring wrongly said "covariance matrix" (copy-paste from
    compute_K); and ``self. hyperparameters`` had a stray space.
    """
    # if gp2Scale:
    # else:
    m = self.mean_function(x_data, self.hyperparameters, self)
    return m

def update_mean(self, x_new):
    """Extend the prior mean vector with values at the new points ``x_new``.

    BUG FIX: the original read ``self.prior_mean_vec``, but this class's
    __init__ assigns ``self.prior_mean_vector`` (AttributeError at first
    call) -- TODO(review): confirm no other code path defines prior_mean_vec.
    FIX: interleaved old/new ``def`` lines (diff residue) resolved to the
    new hyperparameter-free signature.
    """
    # if gp2Scale: ...
    # else:
    m = np.append(self.prior_mean_vector, self.mean_function(x_new, self.hyperparameters, self))
    return m

####################################################
Expand Down

0 comments on commit eb23c47

Please sign in to comment.