-
Notifications
You must be signed in to change notification settings - Fork 0
/
main.py
67 lines (55 loc) · 1.68 KB
/
main.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
from Event import Event
from sklearn.model_selection import train_test_split
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
# --- Reading and balancing the MAGIC gamma-telescope data --------------------
# The raw file contains more gamma ("g") rows than hadron rows, so gamma
# events are capped at classCount to give both classes equal representation.
# NOTE(review): 6688 is presumably the number of hadron rows in magic04.data
# — confirm against the dataset.
file = "./magic04.data"
events = []
classCount = 6688
gammaCount = 0
with open(file, "r") as data_file:  # renamed from `input` (shadowed the builtin)
    for line in data_file:
        # each row: 10 numeric feature columns followed by the class label
        fields = line.split(",")
        event = Event(fields[0], fields[1], fields[2], fields[3], fields[4],
                      fields[5], fields[6], fields[7], fields[8], fields[9],
                      fields[10])
        if event.isGamma():
            # keep at most classCount gamma events to balance the classes
            if gammaCount < classCount:
                gammaCount += 1
                events.append(event)
        else:
            events.append(event)
# (no explicit close() — the with-statement already closed the file)

# --- Splitting data ----------------------------------------------------------
training, test = train_test_split(events, test_size=0.3, random_state=42)

# --- Preparing data: feature matrices and label vectors ----------------------
train_x = [e.getArray() for e in training]
train_y = [e.clas for e in training]
test_x = [e.getArray() for e in test]
test_y = [e.clas for e in test]

# --- Build and train a small dense binary classifier -------------------------
# NOTE(review): magic04 has 10 feature columns; input_dim=8 looks like a
# mismatch with Event.getArray() — verify the array length it returns.
model = keras.Sequential()
model.add(layers.Dense(12, input_dim=8, activation='relu'))
model.add(layers.Dense(8, activation='relu'))
model.add(layers.Dense(1, activation='sigmoid'))
# compile the keras model for binary classification
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
# fit the keras model on the training split
model.fit(train_x, train_y, epochs=150, batch_size=10)
# evaluate on the held-out TEST split — the original evaluated on the training
# data, which reports optimistic accuracy and left test_x/test_y unused
_, accuracy = model.evaluate(test_x, test_y)
print('Accuracy: %.2f' % (accuracy * 100))

# Other classifiers tried previously (kept commented out for reference):
#Decision Tree
#decisionTree(train_x,train_y,test_x,test_y)
#Random Forest
#randomForest(train_x,train_y,test_x,test_y,n_estimators=5)
#Ada Boost
#adaBoost(train_x,train_y,test_x,test_y,n_estimators=5)
#KNN
#kNearestNeighbor(train_x,train_y,test_x,test_y,k=5)
#Naive Bayes
#naiveBayes(train_x,train_y,test_x,test_y)