-
Notifications
You must be signed in to change notification settings - Fork 2
/
feature_selection_tryout.py
executable file
·67 lines (59 loc) · 2.43 KB
/
feature_selection_tryout.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
# coding=utf-8
import inspect

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from sklearn.datasets import make_classification
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.ensemble import GradientBoostingClassifier, RandomForestClassifier
from sklearn.feature_selection import RFECV, SelectKBest, chi2, f_classif
from sklearn.impute import SimpleImputer
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix
from sklearn.model_selection import (KFold, StratifiedKFold, cross_val_predict,
                                     cross_val_score, train_test_split)
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import Imputer  # NOTE(review): removed in scikit-learn >= 0.22; superseded by SimpleImputer above
from sklearn.preprocessing import LabelEncoder, Normalizer, StandardScaler
from sklearn.svm import SVC, LinearSVC, NuSVC
from xgboost import XGBClassifier
# Tag of the low-pass filter variant; selects which training CSV is loaded below.
# NOTE(review): this name shadows the builtin `filter` — consider renaming (e.g. FILTER_TAG).
filter = 'butter_5hz_lowpass'
def retrieve_name(var):
    """Return the caller's local-variable names bound to *var*.

    Inspects the frame one level up the call stack and collects every
    local name whose value is the identical object (``is``) passed in.
    """
    caller_frame = inspect.currentframe().f_back
    names = []
    for name, value in caller_frame.f_locals.items():
        if value is var:
            names.append(name)
    return names
# ================================================Load & Encode Data====================================================
# Feature matrix = all columns but the last; label vector = last column.
data = pd.read_csv('combined_trainingsdata_{}.csv'.format(filter))
X = data.iloc[:, :-1].values
Y = data.iloc[:, -1]

# Encode string class labels as integers. Fit once — the original ran
# fit_transform twice on the same labels, which was redundant.
labelencoder_Y = LabelEncoder()
Y = labelencoder_Y.fit_transform(Y)
print('Y shape')
print(Y.shape)
# ================================================Splitting Training/Test Data==========================================
# Column-wise mean imputation of missing values.
# sklearn.preprocessing.Imputer was removed in scikit-learn 0.22;
# SimpleImputer is the drop-in replacement (column-wise by default, so no axis arg).
imputer = SimpleImputer(missing_values=np.nan, strategy='mean')
X = imputer.fit_transform(X)

# Standardize features (zero mean, unit variance) before feature selection.
sc_X = StandardScaler()
X = sc_X.fit_transform(X)
print('X shape before passing to classifier')
print(X.shape)
print('Y shape before passing to classifier')
print(Y.shape)
# ================================================Model Selection=======================================================
# Evaluate an XGBoost classifier on the top-k ANOVA-F features for every k.
# (The unused duplicate classifier `xgb_clf_bus` from the original was dropped.)
xgb_clf = XGBClassifier()
for i in range(X.shape[1]):
    # k = i + 1 always lies in 1..n_features here, but keep the guard in
    # case the loop bound ever changes; 'all' selects every feature.
    try:
        selector = SelectKBest(f_classif, k=i + 1).fit(X, Y)
    except ValueError:
        selector = SelectKBest(f_classif, k='all').fit(X, Y)
    X_new = selector.transform(X)
    print(X_new)
    # 10-fold cross-validated predictions, scored against the true labels.
    y_hats = cross_val_predict(xgb_clf, X_new, Y, cv=10)
    print(y_hats)
    # accuracy_score already returns a scalar — no np.average wrapper needed.
    accs_clf = accuracy_score(Y, y_hats)
    print(accs_clf)