-
Notifications
You must be signed in to change notification settings - Fork 0
/
keras-ex_with_packages.py
58 lines (45 loc) · 1.9 KB
/
keras-ex_with_packages.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
from keras import backend as K
from keras.losses import categorical_crossentropy
from keras.activations import softmax
from keras import optimizers
import numpy as np
# Synthetic 3-class dataset on the unit square: the label of a point
# (x0, x1) depends on which region it falls into.
dataset_size = 200000
X = np.random.rand(dataset_size, 2)
labels = np.zeros((dataset_size, 3))
first_greater = X[:, 0] > X[:, 1]
labels[first_greater] = [0, 0, 1]   # below the diagonal x0 = x1
labels[~first_greater] = [1, 0, 0]  # on or above it
labels[X.sum(axis=1) > 1] = [0, 1, 0]  # overrides both cases above
# Symbolic graph inputs: x holds 2-D feature batches, t one-hot targets.
# NOTE(review): K.placeholder is the TF1-era Keras backend API; this
# script will not run on Keras 3 / TF2 eager mode — confirm environment.
x = K.placeholder(shape=(None, 2))
t = K.placeholder(shape=(None, 3))
# Parameters of a 2 -> 12 -> 3 MLP, initialised from N(0, 0.01^2).
theta1 = K.random_normal_variable(shape=(2, 12), mean=0, scale=0.01)
bias1 = K.random_normal_variable(shape=(1, 12), mean=0, scale=0.01)
theta2 = K.random_normal_variable(shape=(12, 3), mean=0, scale=0.01)
bias2 = K.random_normal_variable(shape=(1, 3), mean=0, scale=0.01)
def forward(x):
    """Forward pass of the 2-layer MLP: ReLU hidden layer, linear output (logits)."""
    hidden = K.maximum(K.dot(x, theta1) + bias1, 0.)
    return K.dot(hidden, theta2) + bias2
# BUG FIX: Keras losses take (y_true, y_pred). The original call passed
# softmax(forward(x)) as y_true and the targets t as y_pred — swapped,
# which silently computes the wrong cross-entropy.
loss = categorical_crossentropy(t, softmax(forward(x)))
params = [theta1, bias1, theta2, bias2]
# sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
# NOTE: the optimizer is only used to build the gradient ops; the actual
# parameter update below is a hand-rolled SGD step, so Adagrad's
# accumulator state is never applied.
sgd = optimizers.Adagrad(lr=0.01, epsilon=None, decay=0.0)
grad = sgd.get_gradients(loss, params)
f = K.function([x, t], [loss] + grad)

batch_size = 20
for i in range(min(dataset_size, 100000) // batch_size):
    # Step-decay schedule: lr starts at 0.5, then is divided by 10 every
    # 1000 batches after a 100-batch warm-up.
    lr = 0.5 * (.1 ** (max(i - 100, 0) // 1000))
    sample = X[batch_size * i:batch_size * (i + 1)]
    target = labels[batch_size * i:batch_size * (i + 1)]
    outputs = f([sample, target])
    # outputs[0] is the loss; outputs[1:] line up with `params`.
    # Loop variable renamed to `g` — the original reused `grad`, shadowing
    # the gradient-ops list defined above.
    for param, g in zip(params, outputs[1:]):
        K.set_value(param, K.eval(param) - g * lr)
    print("cost {} - learning rate {}".format(outputs[0], lr))
# Rebuild the graph function to emit predicted class indices only.
f = K.function([x], [K.argmax(forward(x), axis=1)])
# NOTE(review): evaluation reuses the first 1000 batches of X, which the
# model was also trained on — confirm this is intentional.
accuracy = 0
for batch_idx in range(1000):
    start = batch_size * batch_idx
    stop = start + batch_size
    predicted = f([X[start:stop]])[0]
    expected = np.argmax(labels[start:stop], axis=1)
    accuracy += np.sum(predicted == expected)
print("Accuracy", accuracy / 1000. / batch_size)
# accuracy 99.44