Fixing broken kernels due to the add_parameter --> link_parameter rename
Elizabeth committed Apr 15, 2022
1 parent bb1bc50 commit cc9d22c
Showing 3 changed files with 23 additions and 23 deletions.
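The same one-line fix applies to any custom kernel written against the old parameterization API: wherever a Kern subclass called self.add_parameter(...), it now has to call self.link_parameter(...), exactly as in the hunks below. A minimal sketch of the pattern follows; the MyKern class, its constant toy covariance, and the import paths are illustrative assumptions for recent GPy/paramz versions, not part of this commit.

# Hypothetical example of the add_parameter --> link_parameter fix; not part of this commit.
import numpy as np
from GPy.kern import Kern
from GPy.core.parameterization import Param  # import path may differ across GPy versions
from paramz.transformations import Logexp

class MyKern(Kern):
    """Toy constant-covariance kernel illustrating link_parameter."""
    def __init__(self, input_dim, variance=1., name='my_kern'):
        super(MyKern, self).__init__(input_dim=input_dim, active_dims=None, name=name)
        self.variance = Param('variance', variance, Logexp())
        # old, now-broken spelling: self.add_parameter(self.variance)
        self.link_parameter(self.variance)

    def K(self, X, X2=None):
        if X2 is None:
            X2 = X
        return self.variance * np.ones((X.shape[0], X2.shape[0]))

    def Kdiag(self, X):
        return self.variance * np.ones(X.shape[0])

    def update_gradients_full(self, dL_dK, X, X2=None):
        self.variance.gradient = np.sum(dL_dK)

Only the call that attaches the parameter to the model hierarchy changes; the gradient bookkeeping through the parameter's .gradient attribute is unaffected, as the unchanged gradient code in the diff below shows.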
38 changes: 19 additions & 19 deletions GPy/kern/src/splitKern.py
@@ -14,25 +14,25 @@ def __init__(self, kernel, idx_p, Xp, index_dim=-1, name='DiffGenomeKern'):
self.index_dim=index_dim
self.kern = SplitKern(kernel,Xp, index_dim=index_dim)
super(DEtime, self).__init__(input_dim=kernel.input_dim+1, active_dims=None, name=name)
- self.add_parameter(self.kern)
+ self.link_parameter(self.kern)

def K(self, X, X2=None):
assert X2==None
K = self.kern.K(X,X2)

if self.idx_p<=0 or self.idx_p>X.shape[0]/2:
return K

slices = index_to_slices(X[:,self.index_dim])
idx_start = slices[1][0].start
idx_end = idx_start+self.idx_p
K_c = K[idx_start:idx_end,idx_start:idx_end].copy()
K[idx_start:idx_end,:] = K[:self.idx_p,:]
K[:,idx_start:idx_end] = K[:,:self.idx_p]
K[idx_start:idx_end,idx_start:idx_end] = K_c

return K

def Kdiag(self,X):
Kdiag = self.kern.Kdiag(X)

@@ -43,19 +43,19 @@ def Kdiag(self,X):
idx_start = slices[1][0].start
idx_end = idx_start+self.idx_p
Kdiag[idx_start:idx_end] = Kdiag[:self.idx_p]

return Kdiag

def update_gradients_full(self,dL_dK,X,X2=None):
assert X2==None
if self.idx_p<=0 or self.idx_p>X.shape[0]/2:
self.kern.update_gradients_full(dL_dK, X)
return

slices = index_to_slices(X[:,self.index_dim])
idx_start = slices[1][0].start
idx_end = idx_start+self.idx_p

self.kern.update_gradients_full(dL_dK[idx_start:idx_end,:], X[:self.idx_p],X)
grad_p1 = self.kern.gradient.copy()
self.kern.update_gradients_full(dL_dK[:,idx_start:idx_end], X, X[:self.idx_p])
@@ -108,7 +108,7 @@ def K(self,X ,X2=None):
if len(slices)>1:
[target.__setitem__((s,s2), self.kern_cross.K(X[s,:],X2[s2,:])) for s,s2 in itertools.product(slices[1], slices2[0])]
if len(slices2)>1:
- [target.__setitem__((s,s2), self.kern_cross.K(X[s,:],X2[s2,:])) for s,s2 in itertools.product(slices[0], slices2[1])]
+ [target.__setitem__((s,s2), self.kern_cross.K(X[s,:],X2[s2,:])) for s,s2 in itertools.product(slices[0], slices2[1])]
return target

def Kdiag(self,X):
@@ -125,7 +125,7 @@ def collate_grads(dL, X, X2, cross=False):
else:
self.kern.update_gradients_full(dL,X,X2)
target[:] += self.kern.gradient

if X2 is None:
assert dL_dK.shape==(X.shape[0],X.shape[0])
[[collate_grads(dL_dK[s,ss], X[s], X[ss]) for s,ss in itertools.product(slices_i, slices_i)] for slices_i in slices]
@@ -154,20 +154,20 @@ def __init__(self, kernel, Xp, name='SplitKern_cross'):
Xp = np.array([[Xp]])
self.Xp = Xp
super(SplitKern_cross, self).__init__(input_dim=kernel.input_dim, active_dims=None, name=name)

def K(self, X, X2=None):
if X2 is None:
return np.dot(self.kern.K(X,self.Xp),self.kern.K(self.Xp,X))/self.kern.K(self.Xp,self.Xp)
else:
return np.dot(self.kern.K(X,self.Xp),self.kern.K(self.Xp,X2))/self.kern.K(self.Xp,self.Xp)

def Kdiag(self, X):
return np.inner(self.kern.K(X,self.Xp),self.kern.K(self.Xp,X).T)/self.kern.K(self.Xp,self.Xp)

def update_gradients_full(self, dL_dK, X, X2=None):
if X2 is None:
X2 = X

k1 = self.kern.K(X,self.Xp)
k2 = self.kern.K(self.Xp,X2)
k3 = self.kern.K(self.Xp,self.Xp)
@@ -181,7 +181,7 @@ def update_gradients_full(self, dL_dK, X, X2=None):
grad += self.kern.gradient.copy()
self.kern.update_gradients_full(np.array([[dL_dk3]]),self.Xp,self.Xp)
grad += self.kern.gradient.copy()

self.kern.gradient = grad

def update_gradients_diag(self, dL_dKdiag, X):
@@ -191,14 +191,14 @@ def update_gradients_diag(self, dL_dKdiag, X):
dL_dk1 = dL_dKdiag*k2[0]/k3
dL_dk2 = dL_dKdiag*k1[:,0]/k3
dL_dk3 = -dL_dKdiag*(k1[:,0]*k2[0]).sum()/(k3*k3)

self.kern.update_gradients_full(dL_dk1[:,None],X,self.Xp)
grad1 = self.kern.gradient.copy()
self.kern.update_gradients_full(dL_dk2[None,:],self.Xp,X)
grad2 = self.kern.gradient.copy()
self.kern.update_gradients_full(np.array([[dL_dk3]]),self.Xp,self.Xp)
grad3 = self.kern.gradient.copy()

self.kern.gradient = grad1+grad2+grad3


2 changes: 1 addition & 1 deletion GPy/kern/src/todo/symmetric.py
@@ -24,7 +24,7 @@ def __init__(self,k,transform=None):
self.num_params = k.num_params
self.name = k.name + '_symm'
self.k = k
- self.add_parameter(k)
+ self.link_parameter(k)
#self._set_params(k._get_params())

def K(self,X,X2,target):
6 changes: 3 additions & 3 deletions GPy/kern/src/trunclinear.py
@@ -51,8 +51,8 @@ def __init__(self, input_dim, variances=None, delta=None, ARD=False, active_dims

self.variances = Param('variances', variances, Logexp())
self.delta = Param('delta', delta)
- self.add_parameter(self.variances)
- self.add_parameter(self.delta)
+ self.link_parameter(self.variances)
+ self.link_parameter(self.delta)

@Cache_this(limit=3)
def K(self, X, X2=None):
@@ -146,7 +146,7 @@ def __init__(self, input_dim, interval, variances=None, ARD=False, active_dims=N
variances = np.ones(self.input_dim)

self.variances = Param('variances', variances, Logexp())
- self.add_parameter(self.variances)
+ self.link_parameter(self.variances)


# @Cache_this(limit=3)
