-
Notifications
You must be signed in to change notification settings - Fork 0
/
testKeras.py
75 lines (51 loc) · 2.37 KB
/
testKeras.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.optimizers import SGD
from keras.layers import Input, Embedding, LSTM, Dense
from keras.models import Model
# ----- Sequential model example -----
import numpy as np

# Dummy data so the script actually runs end-to-end (these names were
# previously undefined -> NameError). Replace with real data.
# Inputs are 100-dimensional; targets are one-hot over 10 classes.
x_train = np.random.random((1000, 100))
y_train = np.eye(10)[np.random.randint(0, 10, size=1000)]
x_test = np.random.random((100, 100))
y_test = np.eye(10)[np.random.randint(0, 10, size=100)]

# create a model
model = keras.models.Sequential()
# 1 draw the graph
model.add(keras.layers.Dense(units=64, input_dim=100))
model.add(keras.layers.Activation("relu"))
model.add(Dropout(0.5))
model.add(keras.layers.Dense(units=10))
model.add(keras.layers.Activation("softmax"))
# 2 compile
# NOTE: the built-in metric alias is lowercase 'accuracy';
# 'Accuracy' was wrong (not a valid string alias in classic Keras).
model.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
# sgd = keras.optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
# model.compile(loss='categorical_crossentropy', optimizer=sgd)
# optimizer='rmsprop', loss='binary_crossentropy'
# 3 train/fit
hist = model.fit(x_train, y_train, epochs=5, batch_size=32, validation_split=0.2)
# x_train and y_train are Numpy arrays
# validation_split: uses the LAST 20% of the input as the validation set.
# Shuffle the data yourself beforehand — Keras applies validation_split
# BEFORE its own per-epoch shuffling.
# model.train_on_batch(x_batch, y_batch)
print(hist.history)  # hist.history holds the loss and metrics after each epoch
# 4 evaluate / predict on test data
loss_and_metrics = model.evaluate(x_test, y_test, batch_size=128)
# or make predictions on new data
classes = model.predict(x_test, batch_size=128)
# save and load the Keras model (in HDF5 format)
filepath = 'keras_sequential_model.h5'  # was undefined before -> NameError
model.save(filepath)
del model  # deletes the existing model
model = keras.models.load_model(filepath)
# ----- Functional (graph) model example -----
# 0 define inputs — length-100 integer token sequences. The original used
# shape=(784,), which contradicts Embedding(input_length=100) below.
inputs = keras.layers.Input(shape=(100,), dtype='int32', name='main_input')
# 1 draw the graph
# (Statements reordered: the original referenced lstm_out before it was
# defined, which raised NameError.)
x = keras.layers.Embedding(output_dim=512, input_dim=10000, input_length=100)(inputs)
lstm_out = keras.layers.LSTM(32)(x)
# Dense takes 'units' — the 'output_dim' kwarg was removed long ago.
x = keras.layers.Dense(units=64, activation='relu')(lstm_out)
x = keras.layers.Dense(units=64, activation='relu')(x)
predictions = keras.layers.Dense(units=10, activation='softmax')(x)
# Model takes 'inputs'/'outputs' (plural) — 'input'/'output' are invalid.
model = keras.models.Model(inputs=inputs, outputs=predictions)
# A multi-input / multi-output model would look like this (kept as a
# reference only — main_input, auxiliary_input, main_output and
# auxiliary_output are not defined in this script):
# x = keras.layers.concatenate([lstm_out, auxiliary_input])
# model = keras.models.Model(inputs=[main_input, auxiliary_input],
#                            outputs=[main_output, auxiliary_output])
# 2 compile
model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])
# 3 train
import numpy as np
data = np.random.randint(0, 10000, size=(32, 100))      # dummy token ids (were undefined)
labels = np.eye(10)[np.random.randint(0, 10, size=32)]  # dummy one-hot targets
model.fit(data, labels)  # starts training