Commit

restructuring in progress
MarcusMNoack committed Mar 16, 2024
1 parent 8efcb31 commit 98494a0
Showing 6 changed files with 786 additions and 4 deletions.
8 changes: 4 additions & 4 deletions fvgp/gp.py
@@ -1256,7 +1256,7 @@ def _compute_gp_linalg(self, vec, KV, calc_inv=False, try_sparse_LU=False):
if self.info: print("logdet() in progress ... ", time.time() - st, "seconds.")
KVlogdet, info_slq = logdet(KV, method='slq', min_num_samples=10, max_num_samples=100,
lanczos_degree=20, error_rtol=0.1, gpu=gpu,
- return_info=True, plot=False, verbose=self.info)
+ return_info=True, plot=False, verbose=False)
if self.info: print("logdet/LU compute time: ", time.time() - st, "seconds.")
# if the problem is large go with rand. lin. algebra straight away
else:
@@ -1269,7 +1269,7 @@ def _compute_gp_linalg(self, vec, KV, calc_inv=False, try_sparse_LU=False):
if self.info: print("logdet() in progress ... ", time.time() - st, "seconds.")
KVlogdet, info_slq = logdet(KV, method='slq', min_num_samples=10, max_num_samples=100,
lanczos_degree=20, error_rtol=0.1, orthogonalize=0, gpu=gpu,
- return_info=True, plot=False, verbose=self.info)
+ return_info=True, plot=False, verbose=False)
if self.info: print("logdet/LU compute time: ", time.time() - st, "seconds.")
KVinv = None
else:
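
For reference, the `logdet` call above has the signature of `imate.logdet` with the stochastic Lanczos quadrature (`slq`) method. The sketch below shows the same call on a stand-in sparse SPD matrix; the matrix construction and sizes are illustrative assumptions, not part of this commit.

```python
# Sketch: randomized log-determinant via stochastic Lanczos quadrature.
# Assumes `logdet` is imate.logdet; parameters mirror _compute_gp_linalg.
import scipy.sparse as sp
from imate import logdet

n = 10_000
A = sp.random(n, n, density=1e-3, format='csr')
KV = A @ A.T + sp.identity(n, format='csr')   # sparse SPD stand-in for KV

KVlogdet, info_slq = logdet(KV, method='slq', min_num_samples=10,
                            max_num_samples=100, lanczos_degree=20,
                            error_rtol=0.1, gpu=False,
                            return_info=True, plot=False, verbose=False)
print(KVlogdet)   # randomized estimate, accurate to roughly error_rtol
```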
@@ -2076,10 +2076,10 @@ def gp_total_correlation(self, x_pred, x_out=None):

k = self.kernel(x_data, x_pred, self.hyperparameters, self)
kk = self.kernel(x_pred, x_pred, self.hyperparameters, self) + (np.identity(len(x_pred)) * 1e-9)
- joint_covariance = np.asarray(np.block([[K, k], \
+ joint_covariance = np.asarray(np.block([[K, k],
[k.T, kk]]))

- prod_covariance = np.asarray(np.block([[K, k * 0.], \
+ prod_covariance = np.asarray(np.block([[K, k * 0.],
[k.T * 0., kk * np.identity(len(kk))]]))

return {"x": x_pred,
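
The blocks above assemble the joint covariance over data and prediction points and its "independence" counterpart (cross-covariances zeroed, prediction points decoupled). For Gaussians, these two matrices yield the total correlation as half the log-determinant gap. A small numerical sketch of that identity follows; the 6x6 SPD matrix is invented for illustration, and fvgp's exact returned quantities may differ.

```python
# Gaussian total correlation from joint vs. independence covariances:
# TC = 0.5 * (logdet(prod_covariance) - logdet(joint_covariance)).
import numpy as np

rng = np.random.default_rng(0)
A = rng.standard_normal((6, 6))
S = A @ A.T + 6.0 * np.eye(6)                 # SPD joint covariance
K, k, kk = S[:3, :3], S[:3, 3:], S[3:, 3:]

joint_covariance = np.asarray(np.block([[K, k],
                                        [k.T, kk]]))
prod_covariance = np.asarray(np.block([[K, k * 0.],
                                       [k.T * 0., kk * np.identity(len(kk))]]))

tc = 0.5 * (np.linalg.slogdet(prod_covariance)[1]
            - np.linalg.slogdet(joint_covariance)[1])
print(tc)   # nonnegative; zero only if the joint already factorizes
```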
34 changes: 34 additions & 0 deletions fvgp/gp_kernels.py
@@ -0,0 +1,34 @@
import torch
from torch import nn


class Network(nn.Module):  # pragma: no cover
    """A small three-layer fully connected network with numpy in/out."""

    def __init__(self, dim, layer_width):
        super().__init__()
        # Linear transformations: dim -> layer_width -> layer_width -> dim
        self.layer1 = nn.Linear(dim, layer_width)
        self.layer2 = nn.Linear(layer_width, layer_width)
        self.layer3 = nn.Linear(layer_width, dim)

    def forward(self, x):
        # Accepts array-like input and returns a numpy array, so the
        # network can be called directly from numpy-based kernel code.
        x = torch.Tensor(x)
        x = torch.nn.functional.relu(self.layer1(x))
        x = torch.nn.functional.relu(self.layer2(x))
        x = torch.nn.functional.relu(self.layer3(x))
        return x.detach().numpy()

    def set_weights(self, w1, w2, w3):
        # Overwrite all layer weights from numpy arrays.
        with torch.no_grad():
            self.layer1.weight = nn.Parameter(torch.from_numpy(w1).float())
            self.layer2.weight = nn.Parameter(torch.from_numpy(w2).float())
            self.layer3.weight = nn.Parameter(torch.from_numpy(w3).float())

    def set_biases(self, b1, b2, b3):
        # Overwrite all layer biases from numpy arrays.
        with torch.no_grad():
            self.layer1.bias = nn.Parameter(torch.from_numpy(b1).float())
            self.layer2.bias = nn.Parameter(torch.from_numpy(b2).float())
            self.layer3.bias = nn.Parameter(torch.from_numpy(b3).float())

    def get_weights(self):
        return self.layer1.weight, self.layer2.weight, self.layer3.weight

    def get_biases(self):
        return self.layer1.bias, self.layer2.bias, self.layer3.bias
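
A hypothetical usage sketch of the new `Network` class; the dimensions, the random weights, and the call pattern are assumptions for illustration, not part of the commit.

```python
# Hypothetical driver for Network; weight shapes follow nn.Linear
# conventions (weight shape = (out_features, in_features)).
import numpy as np

dim, width = 3, 5
net = Network(dim, width)

net.set_weights(np.random.randn(width, dim),    # layer1: dim -> width
                np.random.randn(width, width),  # layer2: width -> width
                np.random.randn(dim, width))    # layer3: width -> dim
net.set_biases(np.random.randn(width),
               np.random.randn(width),
               np.random.randn(dim))

x = np.random.randn(10, dim)   # 10 input points
y = net(x)                     # numpy array of warped points, shape (10, dim)
```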
6 changes: 6 additions & 0 deletions fvgp/gp_likelihood.py
@@ -0,0 +1,6 @@

class GPlikelihood:  # pragma: no cover
    # Stub created during the restructuring; not yet implemented.
    def __init__(self, K, V):
        pass
5 changes: 5 additions & 0 deletions fvgp/gp_posterior.py
@@ -0,0 +1,5 @@

import numpy as np


class GPosterior:  # pragma: no cover
    # Stub created during the restructuring.
    def __init__(self, KVinvY=None):
        assert isinstance(KVinvY, np.ndarray)
