Commit 5e2197c

small change in kl div
MarcusMNoack committed Dec 13, 2023
1 parent fd123c9 commit 5e2197c
Showing 2 changed files with 13 additions and 11 deletions.
16 changes: 9 additions & 7 deletions fvgp/gp.py
@@ -501,7 +501,9 @@ def update_gp_data(
         #######prepare noise covariances##########
         ##########################################
         if noise_variances is not None and callable(self.noise_function):
-            warnings.warn("Noise function and measurement noise provided. noise_variances set to None", stacklevel=2)
+            warnings.warn("Noise function and measurement noise provided. \
+            This can happen if no measurement noise was provided at initialization.\
+            New noise_variances set to None", stacklevel=2)
             noise_variances = None

         if noise_variances is None:
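For context: the backslash continuations inside the new warning string keep each following line's leading whitespace in the emitted message. Implicit concatenation of adjacent string literals avoids that; a small sketch, not part of this commit:

```python
import warnings

# Adjacent string literals are joined at compile time, so the message
# carries no stray indentation from continuation lines.
warnings.warn("Noise function and measurement noise provided. "
              "This can happen if no measurement noise was provided at initialization. "
              "New noise_variances set to None", stacklevel=2)
```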
@@ -1883,9 +1885,9 @@ def gp_kl_div(self, x_pred, comp_mean, comp_cov, x_out=None):
         ----------
         x_pred : np.ndarray
             A numpy array of shape (V x D), interpreted as an array of input point positions.
-        comp_mean : np.array
+        comp_mean : np.ndarray
             Comparison mean vector for KL divergence. len(comp_mean) = len(x_pred)
-        comp_cov : np.array
+        comp_cov : np.ndarray
             Comparison covariance matrix for KL divergence. shape(comp_cov) = (len(x_pred),len(x_pred))
         x_out : np.ndarray, optional
             Output coordinates in case of multitask GP use; a numpy array of size (N x L),
@@ -1906,8 +1908,8 @@ def gp_kl_div(self, x_pred, comp_mean, comp_cov, x_out=None):

         res = self.posterior_mean(x_pred, x_out=None)
         gp_mean = res["f(x)"]
-        gp_cov = self.posterior_covariance(x_pred, x_out=None)["S"]
-
+        gp_cov = self.posterior_covariance(x_pred, x_out=None)["S"] + np.identity(len(x_pred))*1e-9
+        comp_cov = comp_cov+np.identity(len(comp_cov))*1e-9
         return {"x": x_pred,
                 "gp posterior mean": gp_mean,
                 "gp posterior covariance": gp_cov,
@@ -1949,9 +1951,9 @@ def gp_kl_div_grad(self, x_pred, comp_mean, comp_cov, direction, x_out=None):

         gp_mean = self.posterior_mean(x_pred, x_out=None)["f(x)"]
         gp_mean_grad = self.posterior_mean_grad(x_pred, direction=direction, x_out=None)["df/dx"]
-        gp_cov = self.posterior_covariance(x_pred, x_out=None)["S"]
+        gp_cov = self.posterior_covariance(x_pred, x_out=None)["S"] + np.identity(len(x_pred))*1e-9
         gp_cov_grad = self.posterior_covariance_grad(x_pred, direction=direction, x_out=None)["dS/dx"]
-
+        comp_cov = comp_cov + np.identity(len(comp_cov)) * 1e-9
         return {"x": x_pred,
                 "gp posterior mean": gp_mean,
                 "gp posterior mean grad": gp_mean_grad,
8 changes: 4 additions & 4 deletions fvgp/gpMCMC.py
@@ -125,7 +125,7 @@ def run_mcmc(self, n_updates=10000,
"distribution var": np.var(x[int(len(x) - (len(x) / 10)):], axis=0)}

###############################################################
def _jump(self, x_old, obj, prior, likelihood): # pragma: no cover
def _jump(self, x_old, obj, prior_eval, likelihood): # pragma: no cover
x_star = x_old.copy()
if callable(obj.prop_dist):
print("obj indices: ", obj.indices)
@@ -139,11 +139,11 @@ def _jump(self, x_old, obj, prior, likelihood):  # pragma: no cover
             likelihood_star = self.log_likelihood_function(x_star, self.args)
             if np.isnan(likelihood_star): likelihood_star = -np.inf
             metr_ratio = np.exp(prior_evaluation_x_star + likelihood_star -
-                                prior - likelihood)
+                                prior_eval - likelihood)
             if np.isnan(metr_ratio): metr_ratio = 0.
             if metr_ratio > np.random.uniform(0, 1, 1):
                 x = x_star.copy()
-                prior = prior_evaluation_x_star
+                prior_eval = prior_evaluation_x_star
                 likelihood = likelihood_star
                 jump_trace = 1.
                 print("accepted")
@@ -156,7 +156,7 @@ def _jump(self, x_old, obj, prior, likelihood):  # pragma: no cover
print("old x :", x_old)
print("new x :", x)
input()
return x, prior, likelihood, jump_trace
return x, prior_eval, likelihood, jump_trace

###############################################################

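The rename from prior to prior_eval clarifies that the argument is the log-prior already evaluated at the current point, not a prior function, and stops one name being reused for both. The surrounding logic is a standard Metropolis step computed in log space; a minimal self-contained sketch that mirrors, but does not reproduce, gpMCMC:

```python
import numpy as np

def metropolis_step(x_old, log_prior_old, log_lik_old, propose, log_prior, log_lik):
    """One Metropolis step with a symmetric proposal.

    Returns the (possibly unchanged) state, its log-prior and
    log-likelihood, and 1./0. as the accept/reject trace.
    """
    x_star = propose(x_old)
    lp_star = log_prior(x_star)
    ll_star = log_lik(x_star)
    if np.isnan(ll_star):
        ll_star = -np.inf
    # Ratio of unnormalized posteriors, formed in log space to avoid under/overflow.
    metr_ratio = np.exp(lp_star + ll_star - log_prior_old - log_lik_old)
    if np.isnan(metr_ratio):
        metr_ratio = 0.
    if metr_ratio > np.random.uniform(0., 1.):
        return x_star, lp_star, ll_star, 1.       # accepted
    return x_old, log_prior_old, log_lik_old, 0.  # rejected
```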
