
Commit

okbalefthanded committed Sep 28, 2022
2 parents d7f259e + 8334b8a commit 27790c7
Showing 2 changed files with 15 additions and 6 deletions.
20 changes: 14 additions & 6 deletions baseline/erp/blda.py
@@ -22,18 +22,26 @@ def fit(self, X, y=None):
# compute regression targets from class labels
# (to do lda via regression)
n_instances = y.shape[0]

y = y.astype(np.float32)
y = y.T
classes = np.unique(y)
yy = y.astype(np.float32)
yy = yy.T
# y = y.astype(np.float32)
# y = y.T
# classes = np.unique(y)
classes = np.unique(yy)
if -1 in classes:
self.neg_class = -1.
'''
n_posexamples = np.sum(y==self.pos_class)
n_negexamples = np.sum(y==self.neg_class)
n_examples = n_posexamples + n_negexamples
y[y==self.pos_class] = n_examples / n_posexamples
y[y==self.neg_class] = -n_examples / n_negexamples

'''
n_posexamples = np.sum(yy==self.pos_class)
n_negexamples = np.sum(yy==self.neg_class)
n_examples = n_posexamples + n_negexamples
yy[yy==self.pos_class] = n_examples / n_posexamples
yy[yy==self.neg_class] = -n_examples / n_negexamples
# n_posexamples = np.sum(y==1)
# n_negexamples = np.sum(y==-1)
# n_examples = n_posexamples + n_negexamples
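Note: the hunk above swaps the in-place relabeling of y for a copy yy, so the caller's label array is no longer mutated, and keeps the old y-based block only inside a triple-quoted string. The balanced target encoding itself can be sketched in isolation as below; encode_targets is an illustrative helper, not part of the repository's API, and the default labels are assumptions.

import numpy as np

def encode_targets(y, pos_class=1., neg_class=-1.):
    # Map class labels to regression targets for LDA-via-regression:
    # positives get n_examples / n_pos, negatives get -n_examples / n_neg,
    # so both classes carry equal total weight in the least-squares fit.
    yy = np.asarray(y, dtype=np.float32).copy()   # work on a copy, as the commit does with yy
    n_pos = np.sum(yy == pos_class)
    n_neg = np.sum(yy == neg_class)
    n_examples = n_pos + n_neg
    targets = np.where(yy == pos_class,
                       n_examples / n_pos,
                       -n_examples / n_neg)
    return targets

# e.g. encode_targets(np.array([1., -1., -1., -1.])) -> [4., -1.333..., -1.333..., -1.333...]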
@@ -130,5 +138,5 @@ def predict_proba(self,X):
# return softmax(self.decision_function(X))
# return self.decision_function(X)
decision = self.decision_function(X)
return expit(decision)
return expit(decision).squeeze()
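Note: the predict_proba change only appends .squeeze(), so the sigmoid-mapped decision values come back as a flat 1-D array rather than a column vector. A minimal sketch of the effect, with placeholder decision values standing in for self.decision_function(X):

import numpy as np
from scipy.special import expit

decision = np.array([[0.3], [-1.2], [2.5]])  # stand-in for self.decision_function(X), shape (n_samples, 1)
proba = expit(decision).squeeze()            # logistic sigmoid, then drop the singleton axis
print(proba.shape)                           # (3,)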

1 change: 1 addition & 0 deletions baseline/erp/stepwise/stepwise.py
@@ -104,6 +104,7 @@ def stepcalc(allx, y, inmodel):
# to rows of X that were thrown out.
b = np.zeros((nin, 1))
Qb = np.dot(Q.conj().T, y)
print(Qb.shape, Qb.min(), Qb.max())
Qb[abs(Qb) < tol * max(abs(Qb))] = 0
b[perm] = linalg.solve(R, Qb)
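Note: the only change in stepwise.py is the added print, a debug trace of the projected targets Qb before the tolerance threshold is applied. The surrounding lines solve the least-squares problem through a QR factorization: project y onto the column space with Q.conj().T, zero the numerically insignificant entries, then back-substitute against R. A self-contained sketch under assumed shapes; allx, y, and tol here are illustrative, and the real stepcalc also handles column pivoting via perm.

import numpy as np
from scipy import linalg

rng = np.random.default_rng(0)
allx = rng.standard_normal((50, 3))          # candidate design matrix (illustrative)
y = rng.standard_normal((50, 1))             # targets (illustrative)
tol = np.finfo(float).eps ** 0.75            # assumed tolerance, not the repository's value

Q, R = linalg.qr(allx, mode='economic')      # thin QR of the design matrix
Qb = np.dot(Q.conj().T, y)                   # projections of y onto the column space
print(Qb.shape, Qb.min(), Qb.max())          # the debug print added in this commit
Qb[abs(Qb) < tol * max(abs(Qb))] = 0         # suppress numerically insignificant terms
b = linalg.solve(R, Qb)                      # regression coefficients from R b = Qb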

