-
Notifications
You must be signed in to change notification settings - Fork 0
/
Lenet.py
143 lines (113 loc) · 5.78 KB
/
Lenet.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@AUTHOR:Joselyn Zhao
@CONTACT:zhaojing17@foxmail.com
@HOME_PAGE:joselynzhao.top
@SOFTWERE:PyCharm
@FILE:Lenet.py
@TIME:2019/6/11 18:20
@DES:
'''
import tensorflow as tf
class Lenet():
    """LeNet-5 convolutional network built as a TensorFlow 1.x static graph.

    Constructing an instance builds the entire graph eagerly: input
    placeholders, two conv/pool stages, three fully-connected layers,
    the loss, the accuracy metric, and an Adam train op — plus
    ``tf.summary`` nodes (histograms, scalars, kernel/feature images)
    for TensorBoard.

    Attributes exposed to callers:
        x, y_         : input placeholder (N, 32, 32, 1) and one-hot labels (N, 10)
        y             : logits of the final fully-connected layer
        loss          : mean softmax cross-entropy (scalar summary "loss")
        cross_entropy : the same mean cross-entropy tensor (kept for
                        backward compatibility; previously a duplicate graph)
        accuracy      : mean top-1 classification accuracy
        train_step    : Adam minimisation op over ``cross_entropy``
    """

    def __init__(self, mu, sigma, lr=0.02, act='relu'):
        """Build the graph immediately.

        Args:
            mu:    mean for the truncated-normal weight initialiser.
            sigma: stddev for the truncated-normal weight initialiser.
            lr:    Adam learning rate.
            act:   activation name, 'relu' (default) or 'sigmoid';
                   any other value applies no activation.
        """
        self.mu = mu
        self.sigma = sigma
        self.lr = lr
        self.activation = act  # default is relu
        self._build_graph()

    def _build_graph(self, network_name="Lenet"):
        """Assemble the full graph in dependency order."""
        self._setup_placeholders_graph()
        self._build_network_graph(network_name)
        self._compute_loss_graph()
        self._compute_acc_graph()
        self._create_train_op_graph()

    def _setup_placeholders_graph(self):
        # 32x32 single-channel images; 10-way one-hot labels.
        self.x = tf.placeholder("float", shape=[None, 32, 32, 1], name='x')
        self.y_ = tf.placeholder("float", shape=[None, 10], name="y_")

    def _cnn_layer(self, scope_name, W_name, b_name, x, filter_shape,
                   conv_stride, padding_tag="VALID", reuse=False):
        """One convolution layer (bias added, NO activation) with summaries.

        Args:
            filter_shape: [kh, kw, in_channels, out_channels].
            conv_stride:  4-element stride list for tf.nn.conv2d.

        NOTE(review): ``scope.reuse_variables()`` only affects
        ``tf.get_variable``; ``tf.Variable`` always creates fresh
        variables, so ``reuse=True`` is a no-op here. Left unchanged to
        preserve existing variable names — confirm before relying on it.
        """
        with tf.variable_scope(scope_name) as scope:
            if reuse:
                scope.reuse_variables()
            conv_W = tf.Variable(
                tf.truncated_normal(shape=filter_shape, mean=self.mu,
                                    stddev=self.sigma),
                name=W_name)
            conv_b = tf.Variable(tf.zeros(filter_shape[3]), name=b_name)
            conv = tf.nn.conv2d(x, conv_W, strides=conv_stride,
                                padding=padding_tag) + conv_b
            tf.summary.histogram("weights", conv_W)
            tf.summary.histogram("biases", conv_b)
            self._conv_visual(conv, conv_W, filter_shape)  # TensorBoard images
            return conv

    def _pooling_layer(self, scope_name, x, pool_ksize, pool_strides,
                       padding_tag="VALID", reuse=False):
        """Max-pooling layer. (Same reuse caveat as ``_cnn_layer``.)"""
        with tf.variable_scope(scope_name) as scope:
            if reuse:
                scope.reuse_variables()
            return tf.nn.max_pool(x, ksize=pool_ksize, strides=pool_strides,
                                  padding=padding_tag)

    def _fully_connected_layer(self, scope_name, W_name, b_name, x, W_shape,
                               reuse=False):
        """One fully-connected layer (x @ W + b, NO activation) with summaries.

        Args:
            W_shape: [in_features, out_features].
        """
        with tf.variable_scope(scope_name) as scope:
            if reuse:
                scope.reuse_variables()
            fc_W = tf.Variable(
                tf.truncated_normal(shape=W_shape, mean=self.mu,
                                    stddev=self.sigma),
                name=W_name)
            fc_b = tf.Variable(tf.zeros(W_shape[1]), name=b_name)
            fc = tf.matmul(x, fc_W) + fc_b
            tf.summary.histogram("weights", fc_W)
            tf.summary.histogram("biases", fc_b)
            self._full_visual(fc_W, W_shape)  # TensorBoard weight image
            return fc

    def _build_network_graph(self, scope_name="Lenet"):
        """Classic LeNet-5 topology: conv-pool, conv-pool, fc x 3."""
        with tf.variable_scope(scope_name):
            # 32x32x1 -> 28x28x6 -> 14x14x6
            conv1 = self._cnn_layer("conv1", "w1", "b1", self.x,
                                    [5, 5, 1, 6], [1, 1, 1, 1])
            self.conv1 = self._activation_way(conv1)
            self.pool1 = self._pooling_layer("pool1", self.conv1,
                                             [1, 2, 2, 1], [1, 2, 2, 1])
            # 14x14x6 -> 10x10x16 -> 5x5x16
            conv2 = self._cnn_layer("conv2", "w2", "b2", self.pool1,
                                    [5, 5, 6, 16], [1, 1, 1, 1])
            self.conv2 = self._activation_way(conv2)
            self.pool2 = self._pooling_layer("pool2", self.conv2,
                                             [1, 2, 2, 1], [1, 2, 2, 1])
            # 5*5*16 = 400 flattened features -> 120 -> 84 -> 10 logits
            self.fc0 = self._flatten(self.pool2)
            fc1 = self._fully_connected_layer("fc1", "wfc1", "bfc1",
                                              self.fc0, [400, 120])
            self.fc1 = self._activation_way(fc1)
            fc2 = self._fully_connected_layer("fc2", "wfc2", "bfc2",
                                              self.fc1, [120, 84])
            self.fc2 = self._activation_way(fc2)
            # Final layer is left as raw logits for the softmax loss.
            self.y = self._fully_connected_layer("fc3", "wfc3", "bfc3",
                                                 self.fc2, [84, 10])
            tf.summary.histogram("ypredict", self.y)

    def _activation_way(self, layer):
        """Apply the configured activation; unknown names pass through."""
        if (self.activation == "relu"):
            layer = tf.nn.relu(layer)
        elif (self.activation == "sigmoid"):
            layer = tf.nn.sigmoid(layer)
        return layer  # the activated layer

    def _flatten(self, conv):
        # Hard-coded 400 = 5*5*16, the pool2 output size for 32x32 input.
        return tf.reshape(conv, [-1, 400])

    def _compute_loss_graph(self):
        """Mean softmax cross-entropy between logits ``y`` and labels ``y_``."""
        with tf.name_scope("loss_function"):
            loss = tf.nn.softmax_cross_entropy_with_logits(labels=self.y_,
                                                           logits=self.y)
            self.loss = tf.reduce_mean(loss)
            tf.summary.scalar("loss", self.loss)

    def _create_train_op_graph(self):
        """Adam train op over the cross-entropy.

        Fix: the original rebuilt the identical mean cross-entropy
        subgraph here; we now alias ``self.loss`` (built in
        ``_compute_loss_graph``, which always runs first) instead, so
        the graph contains the computation once. ``cross_entropy`` is
        kept as an attribute for backward compatibility.
        """
        with tf.name_scope("train_function"):
            self.cross_entropy = self.loss
            self.train_step = tf.train.AdamOptimizer(self.lr).minimize(
                self.cross_entropy)
            tf.summary.scalar("cross_entropy", self.cross_entropy)

    def _compute_acc_graph(self):
        """Mean top-1 accuracy from argmax of logits vs. one-hot labels."""
        with tf.name_scope("acc_function"):
            correct_prediction = tf.equal(tf.argmax(self.y, 1),
                                          tf.argmax(self.y_, 1))
            self.accuracy = tf.reduce_mean(tf.cast(correct_prediction,
                                                   tf.float32))
            tf.summary.scalar("accuracy", self.accuracy)

    def _conv_visual(self, conv, conv_W, filter_shape):
        """Emit TensorBoard images of the conv kernels and feature maps."""
        with tf.name_scope('visual'):
            # Normalise kernel weights to [0, 1] so they render as images.
            x_min = tf.reduce_min(conv_W)
            x_max = tf.reduce_max(conv_W)
            kernel_0_to_1 = (conv_W - x_min) / (x_max - x_min)
            # [kh, kw, in, out] -> [out, in, kh, kw] -> one image per kernel.
            kernel_transposed = tf.transpose(kernel_0_to_1, [3, 2, 0, 1])
            conv_W_img = tf.reshape(kernel_transposed,
                                    [-1, filter_shape[0], filter_shape[1], 1])
            tf.summary.image('conv_w', conv_W_img,
                             max_outputs=filter_shape[3])
            # Feature maps of the first example, one image per channel.
            feature_img = conv[0:1, :, :, 0:filter_shape[3]]
            feature_img = tf.transpose(feature_img, perm=[3, 1, 2, 0])
            tf.summary.image('feature_conv', feature_img,
                             max_outputs=filter_shape[3])

    def _full_visual(self, fc_W, W_shape):
        """Emit a TensorBoard image of the normalised FC weight matrix."""
        with tf.name_scope('visual'):
            x_min = tf.reduce_min(fc_W)
            x_max = tf.reduce_max(fc_W)
            kernel_0_to_1 = (fc_W - x_min) / (x_max - x_min)
            fc_W_img = tf.reshape(kernel_0_to_1,
                                  [-1, W_shape[0], W_shape[1], 1])
            tf.summary.image('fc_w', fc_W_img, max_outputs=1)