-
Notifications
You must be signed in to change notification settings - Fork 0
/
Cifar10.py
111 lines (84 loc) · 3.61 KB
/
Cifar10.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
import os
import pickle as cPickle
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
import mnist
DATA_PATH = "./cifar-10-batches-py/"


def unpickle(file):
    """Load one CIFAR-10 batch file from DATA_PATH.

    Parameters
    ----------
    file : str
        Batch file name relative to DATA_PATH (e.g. "data_batch_1").

    Returns
    -------
    dict
        The unpickled batch dictionary. Keys are bytes (b'data', b'labels',
        ...) because the CIFAR-10 archives were pickled under Python 2 and
        are loaded here with encoding='bytes'.
    """
    # NOTE(review): pickle.load executes arbitrary code if the file is
    # untrusted — only use with the official CIFAR-10 archive.
    with open(os.path.join(DATA_PATH, file), 'rb') as fo:
        # Renamed from 'dict' to avoid shadowing the builtin.
        batch = cPickle.load(fo, encoding='bytes')
    return batch
def one_hot(vec, vals=10):
    """Convert a vector of integer class labels into a one-hot matrix.

    Parameters
    ----------
    vec : sequence of int
        Class indices, each expected in [0, vals).
    vals : int, optional
        Number of classes (width of each one-hot row); defaults to 10.

    Returns
    -------
    numpy.ndarray
        Float array of shape (len(vec), vals) with a single 1.0 per row.
    """
    size = len(vec)
    encoded = np.zeros((size, vals))
    # Fancy indexing: row i gets a 1 in column vec[i].
    encoded[np.arange(size), vec] = 1
    return encoded
class CifarLoader(object):
    """Loads CIFAR-10 batch files and serves them as (images, labels) batches.

    After load(), `images` is a float array of shape (N, 32, 32, 3) scaled to
    [0, 1] and `labels` is a one-hot (N, 10) array.
    """

    def __init__(self, source_files):
        self._source = source_files  # batch file names, resolved by unpickle()
        self._i = 0                  # cursor into the dataset for next_batch()
        self.images = None
        self.labels = None

    def load(self):
        """Read every source file, decode images and labels, and return self."""
        data = [unpickle(f) for f in self._source]
        # BUG FIX: np.vstack must receive a real sequence; passing a generator
        # was deprecated in NumPy 1.16 and raises TypeError in current NumPy.
        images = np.vstack([d[b'data'] for d in data])
        n = len(images)
        # CIFAR-10 stores each image as 3072 bytes in channel-major order;
        # reshape to (N, C, H, W) then transpose to (N, H, W, C), scale to [0, 1].
        self.images = images.reshape(n, 3, 32, 32).transpose(0, 2, 3, 1).astype(float) / 255
        self.labels = one_hot(np.hstack([d[b'labels'] for d in data]), 10)
        return self

    def next_batch(self, batch_size):
        """Return the next (x, y) slice of size <= batch_size, wrapping the cursor.

        Note: the final slice of an epoch may be shorter than batch_size, and
        the cursor wraps modulo the dataset size (no shuffling).
        """
        x = self.images[self._i:self._i + batch_size]
        y = self.labels[self._i:self._i + batch_size]
        self._i = (self._i + batch_size) % len(self.images)
        return x, y
class CifarDataManager(object):
    """Bundles the CIFAR-10 training and test sets as loaded CifarLoader objects."""

    def __init__(self):
        # The dataset ships as five training batches plus one test batch.
        train_files = ["data_batch_{}".format(b) for b in range(1, 6)]
        self.train = CifarLoader(train_files).load()
        self.test = CifarLoader(["test_batch"]).load()
if __name__ == "__main__":
    cifar = CifarDataManager()

    # Placeholders: input images (NHWC), one-hot labels, dropout keep rate.
    x = tf.placeholder(tf.float32, shape=[None, 32, 32, 3])
    y_ = tf.placeholder(tf.float32, shape=[None, 10])
    keep_prob = tf.placeholder(tf.float32)

    # Three conv+pool stages; each 2x2 max-pool halves the spatial size:
    # 32x32 -> 16x16 -> 8x8 -> 4x4, channels 3 -> 32 -> 64 -> 128.
    conv1 = mnist.conv_layer(x, shape=[5, 5, 3, 32])
    conv1_pool = mnist.max_pool_2x2(conv1)
    conv2 = mnist.conv_layer(conv1_pool, shape=[5, 5, 32, 64])
    conv2_pool = mnist.max_pool_2x2(conv2)
    conv3 = mnist.conv_layer(conv2_pool, shape=[5, 5, 64, 128])
    conv3_pool = mnist.max_pool_2x2(conv3)

    conv3_flat = tf.reshape(conv3_pool, [-1, 4 * 4 * 128])
    conv3_drop = tf.nn.dropout(conv3_flat, keep_prob=keep_prob)
    # BUG FIX: the dense layer must consume the dropped-out tensor. The
    # original fed conv3_flat here, leaving conv3_drop dead so no dropout
    # was actually applied between the conv stack and the first dense layer.
    full_1 = tf.nn.relu(mnist.full_layer(conv3_drop, 512))
    full1_drop = tf.nn.dropout(full_1, keep_prob=keep_prob)
    y_conv = mnist.full_layer(full1_drop, 10)

    # Softmax cross-entropy loss, Adam optimizer, and accuracy metric.
    cross_entropy = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(logits=y_conv, labels=y_))
    train_step = tf.train.AdamOptimizer(1e-3).minimize(cross_entropy)
    correct_prediction = tf.equal(tf.argmax(y_conv, 1), tf.argmax(y_, 1))
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

    def test(sess):
        """Evaluate accuracy over the 10,000 test images in 10 chunks of 1,000.

        Chunking keeps peak memory bounded; keep_prob=1.0 disables dropout
        at evaluation time.
        """
        X = cifar.test.images.reshape(10, 1000, 32, 32, 3)
        Y = cifar.test.labels.reshape(10, 1000, 10)
        acc = np.mean([sess.run(accuracy,
                                feed_dict={x: X[i], y_: Y[i], keep_prob: 1.0})
                       for i in range(10)])
        print("Accuracy {:.4}%".format(acc * 100))

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for i in range(mnist.NUM_STEPS):
            batch = cifar.train.next_batch(mnist.MINIBATCH_SIZE)
            # keep_prob=0.5 enables dropout during training only.
            sess.run(train_step,
                     feed_dict={x: batch[0], y_: batch[1], keep_prob: 0.5})
        test(sess)