Commit

Add bias to locallogisticregression, bug fix
christopherjenness committed Jun 24, 2018
1 parent 977649b commit 18aa7d9
Showing 1 changed file with 22 additions and 16 deletions.
38 changes: 22 additions & 16 deletions ML/kernelmethods.py
@@ -126,30 +126,33 @@ def logistic_function(logistic_input):
     def locallogisticHessian(self, theta, weights, reg_param):
         """
         Hessian for regularized local logistic regression L2 loss
         Args:
             theta (np.array): Current lwlr parameters of shape
                 [1, n_features]
             weights (np.array): training set weights of shape
                 [n_samples, 1]
             reg_param (float): L2 regularization weight. If 0, no
                 regularization is used.
         Returns:
             Hessian (np.ndarray): Hessian of shape [n_features, n_features]
         """
+        # Add bias to X
+        X = np.insert(self.X, 0, 1, axis=1)
+
         D = []
-        for row in range(np.shape(self.X)[0]):
+        for row in range(np.shape(X)[0]):
             D.append(weights[row] *
-                     self.logistic_function(np.dot(self.X[row, :],
+                     self.logistic_function(np.dot(X[row, :],
                                             np.transpose(theta))) *
                      (1 -
-                      self.logistic_function(np.dot(self.X[row, :],
+                      self.logistic_function(np.dot(X[row, :],
                                              np.transpose(theta)))))
         D = np.diag(D)
-        hessian = (np.matmul(np.matmul(self.X.T, D),
-                             self.X) -
-                   np.identity(np.shape(self.X)[1]) * reg_param)
+        hessian = (np.matmul(np.matmul(X.T, D),
+                             X) -
+                   np.identity(np.shape(X)[1]) * reg_param)
         return hessian
 
     def locallogisticregression(self, x, kernel, gamma, reg_param=0,
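The bias trick above prepends a column of ones so the first component of `theta` serves as an intercept. As a standalone illustration (not part of the repository), here is what `np.insert(self.X, 0, 1, axis=1)` produces:

```python
import numpy as np

X = np.array([[2.0, 3.0],
              [4.0, 5.0]])
# Inserting the scalar 1 before column 0 prepends a bias column of ones
X_bias = np.insert(X, 0, 1, axis=1)
# array([[1., 2., 3.],
#        [1., 4., 5.]])
```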
@@ -171,26 +174,29 @@ def locallogisticregression(self, x, kernel, gamma, reg_param=0,
             iterations (int): number of gradient descent steps to take
             alpha (float): size of each gradient descent step to take
         """
-        # Set training set weights for query point
-        W = [kernel(self.X[row], x, gamma)
-             for row in range(np.shape(self.X)[0])]
+        # Add bias to X
+        X = np.insert(self.X, 0, 1, axis=1)
+        x = np.insert(x, 0, 1)
+
+        # Set training set weights for query point
+        W = [kernel(X[row], x, gamma)
+             for row in range(np.shape(X)[0])]
 
         # Initialize theta
-        theta = np.zeros(np.shape(self.X)[1]) + 0.0001
+        theta = np.zeros(np.shape(X)[1]) + 0.0001
 
         # Newton's method
         iteration = 0
         while iteration < iterations:
             hessian = self.locallogisticHessian(theta, W, reg_param)
             z = [W[row] * (self.y[row] -
-                           self.logistic_function(np.dot(self.X[row, :],
+                           self.logistic_function(np.dot(X[row, :],
                                                   theta)))
-                 for row in range(np.shape(self.X)[0])]
-            gradient = np.matmul(self.X.T, z) - (reg_param * theta)
+                 for row in range(np.shape(X)[0])]
+            gradient = np.matmul(X.T, z) - (reg_param * theta)
             step_direction = -np.dot(np.linalg.pinv(hessian), gradient)
             theta = theta + alpha * step_direction
             iteration += 1
-        print('****', theta, x, np.dot(x, theta))
         prediction = self.logistic_function(np.dot(x, theta))
         return prediction
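The loops above build the weighted Hessian `X^T D X - reg_param * I` and gradient `X^T z - reg_param * theta`, with `D_ii = W_i * p_i * (1 - p_i)` and `z_i = W_i * (y_i - p_i)`. A minimal vectorized sketch of one Newton step, mirroring the code's sign conventions as written; `sigmoid` and `newton_step` are illustrative names, not the class's API, and `X` is assumed to already carry the bias column:

```python
import numpy as np

def sigmoid(t):
    return 1.0 / (1.0 + np.exp(-t))

def newton_step(X, y, theta, w, lam, alpha=1.0):
    p = sigmoid(X @ theta)
    d = w * p * (1 - p)
    # Hessian as in locallogisticHessian: X^T D X - lam * I
    hessian = (X.T * d) @ X - lam * np.eye(X.shape[1])
    # Gradient as in the loop: X^T (w * (y - p)) - lam * theta
    gradient = X.T @ (w * (y - p)) - lam * theta
    # theta + alpha * step_direction, with step_direction = -pinv(H) @ grad
    return theta - alpha * np.linalg.pinv(hessian) @ gradient
```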

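The `kernel` argument is any function of a training point, the query point, and `gamma` that returns a nonnegative weight; a common choice is a Gaussian (RBF) kernel. The snippet below is an assumption for illustration, not necessarily the kernel defined elsewhere in `ML/kernelmethods.py`:

```python
import numpy as np

def gaussian_kernel(x_i, x, gamma):
    # Weight falls off with squared Euclidean distance from the query point
    return np.exp(-gamma * np.sum((x_i - x) ** 2))
```

With such a kernel, each query point gets its own locally weighted fit, which is why `theta` is re-estimated from scratch on every call.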
