Commit f15c696

identity block and convolutional block
1 parent: f0c3230

File tree

3 files changed (+149, -19 lines)


keras_image_classifier/library/residual_network.py

Lines changed: 0 additions & 18 deletions
This file was deleted.
Lines changed: 148 additions & 0 deletions
@@ -0,0 +1,148 @@
import numpy as np
from keras import layers
from keras.layers import Input, Add, Dense, Activation, ZeroPadding2D, BatchNormalization, Flatten, Conv2D, AveragePooling2D, MaxPooling2D, GlobalMaxPooling2D
from keras.models import Model, load_model
from keras.preprocessing import image
from keras.utils import layer_utils
from keras.utils.data_utils import get_file
from keras.applications.imagenet_utils import preprocess_input
from keras.utils.vis_utils import model_to_dot
from keras.utils import plot_model
from keras_image_classifier.resnets_utils import *
from keras.initializers import glorot_uniform
import scipy.misc
import keras.backend as K
import tensorflow as tf

K.set_image_data_format('channels_last')
K.set_learning_phase(1)


def identity_block(X, f, filters, stage, block):
    """
    Implementation of the identity block as defined in Figure 3

    Arguments:
    X -- input tensor of shape (m, n_H_prev, n_W_prev, n_C_prev)
    f -- integer, specifying the shape of the middle CONV's window for the main path
    filters -- python list of integers, defining the number of filters in the CONV layers of the main path
    stage -- integer, used to name the layers, depending on their position in the network
    block -- string/character, used to name the layers, depending on their position in the network

    Returns:
    X -- output of the identity block, tensor of shape (n_H, n_W, n_C)
    """

    # defining name basis
    conv_name_base = 'res' + str(stage) + block + '_branch'
    bn_name_base = 'bn' + str(stage) + block + '_branch'

    # Retrieve Filters
    F1, F2, F3 = filters

    # Save the input value. You'll need this later to add back to the main path.
    X_shortcut = X

    # First component of main path
    X = Conv2D(filters=F1, kernel_size=(1, 1), strides=(1, 1), padding='valid', name=conv_name_base + '2a',
               kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_name_base + '2a')(X)
    X = Activation('relu')(X)

    # Second component of main path (≈3 lines)
    X = Conv2D(filters=F2, kernel_size=(f, f), strides=(1, 1), padding='same', name=conv_name_base + '2b',
               kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_name_base + '2b')(X)
    X = Activation('relu')(X)

    # Third component of main path (≈2 lines)
    X = Conv2D(filters=F3, kernel_size=(1, 1), strides=(1, 1), padding='valid', name=conv_name_base + '2c',
               kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_name_base + '2c')(X)

    # Final step: Add shortcut value to main path, and pass it through a RELU activation (≈2 lines)
    X = Add()([X, X_shortcut])
    X = Activation('relu')(X)

    return X


def identity_block_test():
    tf.reset_default_graph()

    with tf.Session() as test:
        np.random.seed(1)
        A_prev = tf.placeholder("float", [3, 4, 4, 6])
        X = np.random.randn(3, 4, 4, 6)
        A = identity_block(A_prev, f=2, filters=[2, 4, 6], stage=1, block='a')
        test.run(tf.global_variables_initializer())
        out = test.run([A], feed_dict={A_prev: X, K.learning_phase(): 0})
        print("out = " + str(out[0][1][1][0]))


def convolutional_block(X, f, filters, stage, block, s=2):
    """
    Implementation of the convolutional block as defined in Figure 4

    Arguments:
    X -- input tensor of shape (m, n_H_prev, n_W_prev, n_C_prev)
    f -- integer, specifying the shape of the middle CONV's window for the main path
    filters -- python list of integers, defining the number of filters in the CONV layers of the main path
    stage -- integer, used to name the layers, depending on their position in the network
    block -- string/character, used to name the layers, depending on their position in the network
    s -- Integer, specifying the stride to be used

    Returns:
    X -- output of the convolutional block, tensor of shape (n_H, n_W, n_C)
    """

    # defining name basis
    conv_name_base = 'res' + str(stage) + block + '_branch'
    bn_name_base = 'bn' + str(stage) + block + '_branch'

    # Retrieve Filters
    F1, F2, F3 = filters

    # Save the input value
    X_shortcut = X

    # First component of main path
    X = Conv2D(F1, (1, 1), padding='valid', strides=(s, s), name=conv_name_base + '2a',
               kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_name_base + '2a')(X)
    X = Activation('relu')(X)

    # Second component of main path (≈3 lines)
    X = Conv2D(F2, (f, f), padding='same', strides=(1, 1), name=conv_name_base + '2b',
               kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_name_base + '2b')(X)
    X = Activation('relu')(X)

    # Third component of main path (≈2 lines)
    X = Conv2D(F3, (1, 1), padding='valid', strides=(1, 1), name=conv_name_base + '2c',
               kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_name_base + '2c')(X)

    ##### SHORTCUT PATH #### (≈2 lines)
    X_shortcut = Conv2D(F3, (1, 1), padding='valid', strides=(s, s), name=conv_name_base + '1',
                        kernel_initializer=glorot_uniform(seed=0))(X_shortcut)
    X_shortcut = BatchNormalization(axis=3, name=bn_name_base + '1')(X_shortcut)

    # Final step: Add shortcut value to main path, and pass it through a RELU activation (≈2 lines)
    X = Add()([X, X_shortcut])
    X = Activation('relu')(X)

    return X


def convolutional_block_test():
    tf.reset_default_graph()

    with tf.Session() as test:
        np.random.seed(1)
        A_prev = tf.placeholder("float", [3, 4, 4, 6])
        X = np.random.randn(3, 4, 4, 6)
        A = convolutional_block(A_prev, f=2, filters=[2, 4, 6], stage=1, block='a')
        test.run(tf.global_variables_initializer())
        out = test.run([A], feed_dict={A_prev: X, K.learning_phase(): 0})
        print("out = " + str(out[0][1][1][0]))

requirements.txt

Lines changed: 1 addition & 1 deletion
@@ -5,4 +5,4 @@ numpy
 h5py
 pillow
 scikit-learn
-https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-0.11.0-cp27-none-linux_x86_64.whl
+tensorflow
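
A hedged note on the now-unpinned requirement: the two *_test helpers in residual_network.py rely on TensorFlow 1.x graph-mode calls (tf.reset_default_graph, tf.Session, tf.placeholder). If pip ever resolves tensorflow to a 2.x release, those names only exist under tf.compat.v1, so the tests would need an adjustment roughly like the sketch below (an assumption about a future install, not part of this commit).

# Sketch only: assumes a TensorFlow 2.x install and a Keras build whose layers
# still run in graph mode; this would replace the existing test helper.
import numpy as np
import tensorflow as tf

tf.compat.v1.disable_eager_execution()  # restore TF1-style graph execution

def identity_block_test():
    tf.compat.v1.reset_default_graph()
    with tf.compat.v1.Session() as test:
        np.random.seed(1)
        A_prev = tf.compat.v1.placeholder("float", [3, 4, 4, 6])
        X = np.random.randn(3, 4, 4, 6)
        A = identity_block(A_prev, f=2, filters=[2, 4, 6], stage=1, block='a')
        test.run(tf.compat.v1.global_variables_initializer())
        # the K.learning_phase() feed may also need revisiting under newer Keras
        out = test.run([A], feed_dict={A_prev: X})
        print("out = " + str(out[0][1][1][0]))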

0 commit comments
