-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathlda.py
50 lines (36 loc) · 1.35 KB
/
lda.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
import numpy as np
class LDA:
    """
    Fisher Linear Discriminant Analysis (LDA) for dimensionality reduction
    and classification.

    Projects data onto the directions that maximize between-class scatter
    relative to within-class scatter, i.e. the top eigenvectors of
    W = S_W^-1 @ S_B.

    Parameters
    ----------
    n_components : int, default=2
        Number of discriminant directions to keep. Note that S_B has rank
        at most (n_classes - 1), so at most that many meaningful components
        exist.

    References
    ----------
    https://usir.salford.ac.uk/id/eprint/52074/1/AI_Com_LDA_Tarek.pdf
    """

    def __init__(self, n_components=2):
        self.n_components = n_components
        # Learned projection matrix, shape (n_features, n_components);
        # set by fit().
        self.evecs = None

    def fit(self, X, y):
        """
        Learn the discriminant directions from training data.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training samples.
        y : array-like of shape (n_samples,)
            Class labels.

        Returns
        -------
        self : LDA
            The fitted estimator (enables method chaining).
        """
        X = np.asarray(X)
        y = np.asarray(y)
        self.n_samples, self.n_features = X.shape
        self.classes = np.unique(y)
        self.n_classes = len(self.classes)

        # Within-class scatter: sum over classes of (N_c - 1) * cov, which
        # equals sum_c sum_{x in c} (x - mu_c)(x - mu_c)^T because np.cov
        # normalizes by (N_c - 1).
        S_W = np.zeros((self.n_features, self.n_features))
        for c in self.classes:
            X_c = X[y == c]
            N_c = len(X_c)
            S_W += (N_c - 1) * np.cov(X_c.T)

        # Total scatter: (n_samples - 1) * cov, NOT n_samples * cov —
        # np.cov uses the (N - 1) divisor. Between-class scatter follows
        # from the decomposition S_T = S_W + S_B.
        S_T = (self.n_samples - 1) * np.cov(X.T)
        S_B = S_T - S_W

        # Solve S_W @ W = S_B rather than forming an explicit inverse;
        # fall back to the pseudo-inverse when S_W is singular (e.g. fewer
        # samples per class than features).
        try:
            W = np.linalg.solve(S_W, S_B)
        except np.linalg.LinAlgError:
            W = np.linalg.pinv(S_W) @ S_B

        # W is not symmetric, so eig may return complex values with
        # negligible imaginary parts; keep the real parts so transform()
        # yields real projections.
        evals, evecs = np.linalg.eig(W)
        evals = np.real(evals)
        evecs = np.real(evecs)

        # Keep eigenvectors of the n_components largest eigenvalues.
        idx = np.argsort(evals)[::-1]
        self.evecs = evecs[:, idx][:, :self.n_components]
        return self

    def transform(self, X):
        """
        Project X onto the learned discriminant directions.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        ndarray of shape (n_samples, n_components)
        """
        return np.dot(X, self.evecs)

    def fit_transform(self, X, y):
        """Fit on (X, y), then return the projection of X."""
        self.fit(X, y)
        return self.transform(X)