-
Notifications
You must be signed in to change notification settings - Fork 0
/
train.py
119 lines (94 loc) · 3.34 KB
/
train.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
from keras.layers import Conv2D, MaxPooling2D, AveragePooling2D, Flatten, \
Dense, Dropout
from keras.models import Sequential
from keras.callbacks import ModelCheckpoint, EarlyStopping
from sklearn.model_selection import train_test_split
import numpy as np
import matplotlib.pyplot as plt
import pickle
#Number of output classes (size of the final softmax layer).
NUM_CLASSES = 4
#Input images are IMG_SIZE x IMG_SIZE, single channel (grayscale).
IMG_SIZE = 48
#Training configuration: maximum epochs and minibatch size for model.fit.
EPOCHS = 32
BATCH_SIZE = 64
'''
Prepare data for neural network.
'''
#Open serialized image and label data. The with-block closes the file
#automatically, so the original explicit f.close() was redundant.
#NOTE(review): pickle.load can execute arbitrary code -- only load a
#data.pkl produced by a trusted source.
with open("./data.pkl", "rb") as f:
    faces, labels = pickle.load(f)
#Split data into testing and training sets (10% held out for testing).
X_train, X_test, Y_train, Y_test = train_test_split(faces,
                                                    labels,
                                                    test_size = 0.1,
                                                    shuffle = True)
#Reshape flat image data into (samples, height, width, channels) as
#expected by the Conv2D input layer; -1 infers the sample count.
X_test = np.array(X_test).reshape(-1, IMG_SIZE, IMG_SIZE, 1)
X_train = np.array(X_train).reshape(-1, IMG_SIZE, IMG_SIZE, 1)
'''
Define model structure.
'''
#Build the whole network as one Sequential stack: three convolutional
#stages with pooling, a dense head with dropout, then a softmax output.
model = Sequential([
    #Stage 1: 5x5 convolution over the grayscale input, max-pooled.
    Conv2D(32, (5, 5), activation = "relu",
           input_shape = (IMG_SIZE, IMG_SIZE, 1)),
    MaxPooling2D(pool_size = (5, 5), strides = (2, 2)),
    #Stage 2: two 3x3 convolutions, average-pooled.
    Conv2D(64, (3, 3), activation = "relu"),
    Conv2D(64, (3, 3), activation = "relu"),
    AveragePooling2D(pool_size = (3, 3), strides = (2, 2)),
    #Stage 3: two more 3x3 convolutions, average-pooled.
    Conv2D(128, (3, 3), activation = "relu"),
    Conv2D(128, (3, 3), activation = "relu"),
    AveragePooling2D(pool_size = (3, 3), strides = (2, 2)),
    #Fully connected head followed by 0.5 dropout.
    Flatten(),
    Dense(1024, activation = "relu"),
    Dropout(0.5),
    #Output layer: one probability per class.
    Dense(NUM_CLASSES, activation = "softmax"),
])
#Compile model. categorical_crossentropy expects one-hot encoded labels.
model.compile(loss = "categorical_crossentropy",
              optimizer = "adam",
              metrics = ["accuracy"])
print ("\n\nTraining....\n\n")
#Declare callbacks.
#The checkpoint filename is keyed on val_loss, which exists under every
#Keras version. The original "{val_acc:.2f}" pattern breaks on Keras >= 2.3,
#where the history/log key was renamed to "val_accuracy", raising KeyError
#when the filename is formatted.
filepath = "./models/model-{epoch:02d}-{val_loss:.2f}.h5"
callbacks = [
    #Stop as soon as validation loss fails to improve (patience = 0).
    EarlyStopping(monitor = "val_loss", patience = 0),
    #Save a checkpoint after every epoch (save_best_only = False).
    #The deprecated period=1 argument is dropped: saving once per epoch
    #is already the default in every Keras version.
    ModelCheckpoint(filepath = filepath,
                    monitor = "val_loss",
                    save_best_only = False,
                    mode = "auto"),
]
#Fit model and store per-epoch statistics. X_train is already an ndarray
#after preprocessing, so the original np.array() wrap was redundant.
stats = model.fit(X_train,
                  Y_train,
                  epochs = EPOCHS,
                  batch_size = BATCH_SIZE,
                  validation_data = (X_test, Y_test),
                  shuffle = True,
                  verbose = 1,
                  callbacks = callbacks)
'''
Evaluate models.
'''
#History keys were renamed in Keras 2.3 ("acc" -> "accuracy",
#"val_acc" -> "val_accuracy"); the original hard-coded "acc" lookup
#raises KeyError on newer Keras. Accept whichever key this version wrote.
acc = stats.history.get("accuracy", stats.history.get("acc"))
val_acc = stats.history.get("val_accuracy", stats.history.get("val_acc"))
loss = stats.history["loss"]
val_loss = stats.history["val_loss"]
#One x-axis point per completed epoch (early stopping may end before EPOCHS).
x = range(len(acc))
#Plot accuracy vs epoch.
plt.subplot(2, 1, 1)
plt.plot(x, acc, "ko", label = "Training", linewidth = 3)
plt.plot(x, val_acc, "k", label = "Validation", linewidth = 3)
plt.ylabel("Accuracy")
plt.ylim((0,1))
plt.legend()
#Plot loss vs epoch.
plt.subplot(2, 1, 2)
plt.plot(x, loss, "ko", label = "Training", linewidth = 3)
plt.plot(x, val_loss, "k", label = "Validation", linewidth = 3)
plt.ylabel("Loss")
plt.xlabel("Epoch")
#Legend added for consistency with the accuracy subplot above.
plt.legend()
#Save figure.
plt.savefig("evaluation.png")