
init

Root commit (0 parents) aef7b7a8422fdb5d75be8a9365d241a1fef0d2bc, committed by @ry on Nov 14, 2015
Showing with 1,694 additions and 0 deletions.
  1. +345 −0 VGG_2014_16.prototxt
  2. +245 −0 caffe_to_tensorflow.py
  3. BIN cat.jpg
  4. +1,000 −0 synset.txt
  5. +35 −0 tf_forward.py
  6. +69 −0 utils.py
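
Taken together, the files sketch a two-step port of VGG-16 from Caffe to TensorFlow: caffe_to_tensorflow.py converts the released Caffe weights into a serialized TensorFlow graph, and tf_forward.py runs a forward pass on cat.jpg, looking the top class up in synset.txt (the 1,000 ImageNet labels, matching fc8's num_output below). The Python diffs are not shown on this page; what follows is a hypothetical forward pass against the 2015-era TensorFlow API, with the model file name and tensor names ("vgg16.tfmodel", "images", "import/prob:0") assumed for illustration, not taken from the commit.

    import numpy as np
    import tensorflow as tf
    from PIL import Image

    # Hypothetical: "vgg16.tfmodel" and the tensor names below are
    # assumptions, not read from this commit's files.
    with open("vgg16.tfmodel", "rb") as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())

    images = tf.placeholder(tf.float32, [None, 224, 224, 3])
    tf.import_graph_def(graph_def, input_map={"images": images})

    # Real preprocessing (mean subtraction / scaling) presumably lives
    # in utils.py; here we only resize.
    img = np.asarray(Image.open("cat.jpg").resize((224, 224)), dtype=np.float32)

    with tf.Session() as sess:
        prob = sess.run("import/prob:0", feed_dict={images: img[None]})

    synset = [line.strip() for line in open("synset.txt")]
    print(synset[int(np.argmax(prob))])

The prototxt diff for the network itself follows.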
VGG_2014_16.prototxt
@@ -0,0 +1,345 @@
+name: "VGG_ILSVRC_16_layers"
+input: "data"
+input_dim: 10
+input_dim: 3
+input_dim: 224
+input_dim: 224
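+# Input: one 10 x 3 x 224 x 224 batch (N x C x H x W, Caffe's ordering).
+# Stage 1: two 3x3/pad-1 convolutions with 64 channels, then 2x2/stride-2 max pool.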
+layer {
+ name: "conv1_1"
+ type: "Convolution"
+ bottom: "data"
+ top: "conv1_1a"
+ convolution_param {
+ num_output: 64
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu1_1"
+ type: "ReLU"
+ bottom: "conv1_1a"
+ top: "conv1_1"
+}
+layer {
+ name: "conv1_2"
+ type: "Convolution"
+ bottom: "conv1_1"
+ top: "conv1_2"
+ convolution_param {
+ num_output: 64
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu1_2"
+ type: "ReLU"
+ bottom: "conv1_2"
+ top: "conv1_2"
+}
+layer {
+ name: "pool1"
+ type: "Pooling"
+ bottom: "conv1_2"
+ top: "pool1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
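+# Stage 2: two 3x3 convolutions with 128 channels, then 2x2/stride-2 max pool.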
+layer {
+ name: "conv2_1"
+ type: "Convolution"
+ bottom: "pool1"
+ top: "conv2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu2_1"
+ type: "ReLU"
+ bottom: "conv2_1"
+ top: "conv2_1"
+}
+layer {
+ name: "conv2_2"
+ type: "Convolution"
+ bottom: "conv2_1"
+ top: "conv2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu2_2"
+ type: "ReLU"
+ bottom: "conv2_2"
+ top: "conv2_2"
+}
+layer {
+ name: "pool2"
+ type: "Pooling"
+ bottom: "conv2_2"
+ top: "pool2"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
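+# Stage 3: three 3x3 convolutions with 256 channels, then 2x2/stride-2 max pool.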
+layer {
+ name: "conv3_1"
+ type: "Convolution"
+ bottom: "pool2"
+ top: "conv3_1"
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu3_1"
+ type: "ReLU"
+ bottom: "conv3_1"
+ top: "conv3_1"
+}
+layer {
+ name: "conv3_2"
+ type: "Convolution"
+ bottom: "conv3_1"
+ top: "conv3_2"
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu3_2"
+ type: "ReLU"
+ bottom: "conv3_2"
+ top: "conv3_2"
+}
+layer {
+ name: "conv3_3"
+ type: "Convolution"
+ bottom: "conv3_2"
+ top: "conv3_3"
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu3_3"
+ type: "ReLU"
+ bottom: "conv3_3"
+ top: "conv3_3"
+}
+layer {
+ name: "pool3"
+ type: "Pooling"
+ bottom: "conv3_3"
+ top: "pool3"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
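+# Stage 4: three 3x3 convolutions with 512 channels, then 2x2/stride-2 max pool.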
+layer {
+ name: "conv4_1"
+ type: "Convolution"
+ bottom: "pool3"
+ top: "conv4_1"
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu4_1"
+ type: "ReLU"
+ bottom: "conv4_1"
+ top: "conv4_1"
+}
+layer {
+ name: "conv4_2"
+ type: "Convolution"
+ bottom: "conv4_1"
+ top: "conv4_2"
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu4_2"
+ type: "ReLU"
+ bottom: "conv4_2"
+ top: "conv4_2"
+}
+layer {
+ name: "conv4_3"
+ type: "Convolution"
+ bottom: "conv4_2"
+ top: "conv4_3"
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu4_3"
+ type: "ReLU"
+ bottom: "conv4_3"
+ top: "conv4_3"
+}
+layer {
+ name: "pool4"
+ type: "Pooling"
+ bottom: "conv4_3"
+ top: "pool4"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
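+# Stage 5: three 3x3 convolutions with 512 channels, then 2x2/stride-2 max pool.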
+layer {
+ name: "conv5_1"
+ type: "Convolution"
+ bottom: "pool4"
+ top: "conv5_1"
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu5_1"
+ type: "ReLU"
+ bottom: "conv5_1"
+ top: "conv5_1"
+}
+layer {
+ name: "conv5_2"
+ type: "Convolution"
+ bottom: "conv5_1"
+ top: "conv5_2"
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu5_2"
+ type: "ReLU"
+ bottom: "conv5_2"
+ top: "conv5_2"
+}
+layer {
+ name: "conv5_3"
+ type: "Convolution"
+ bottom: "conv5_2"
+ top: "conv5_3"
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu5_3"
+ type: "ReLU"
+ bottom: "conv5_3"
+ top: "conv5_3"
+}
+layer {
+ name: "pool5"
+ type: "Pooling"
+ bottom: "conv5_3"
+ top: "pool5"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
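+# Classifier: fc6 and fc7 (4096-d, each with ReLU and 0.5 dropout),
+# fc8 (1000-d, one output per ImageNet class), then softmax.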
+layer {
+ name: "fc6"
+ type: "InnerProduct"
+ bottom: "pool5"
+ top: "fc6a"
+ inner_product_param {
+ num_output: 4096
+ }
+}
+layer {
+ name: "relu6"
+ type: "ReLU"
+ bottom: "fc6a"
+ top: "fc6"
+}
+layer {
+ name: "drop6"
+ type: "Dropout"
+ bottom: "fc6"
+ top: "fc6"
+ dropout_param {
+ dropout_ratio: 0.5
+ }
+}
+layer {
+ name: "fc7"
+ type: "InnerProduct"
+ bottom: "fc6"
+ top: "fc7"
+ inner_product_param {
+ num_output: 4096
+ }
+}
+layer {
+ name: "relu7"
+ type: "ReLU"
+ bottom: "fc7"
+ top: "fc7"
+}
+layer {
+ name: "drop7"
+ type: "Dropout"
+ bottom: "fc7"
+ top: "fc7"
+ dropout_param {
+ dropout_ratio: 0.5
+ }
+}
+layer {
+ name: "fc8"
+ type: "InnerProduct"
+ bottom: "fc7"
+ top: "fc8"
+ inner_product_param {
+ num_output: 1000
+ }
+}
+layer {
+ name: "prob"
+ type: "Softmax"
+ bottom: "fc8"
+ top: "prob"
+}
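
For anyone following the conversion: each Convolution/ReLU pair above has a direct TensorFlow counterpart. With kernel_size: 3, pad: 1, and the implicit stride of 1, the Caffe convolution preserves spatial size, which is exactly padding='SAME' in TensorFlow; Caffe stores filters as (out, in, h, w) and activations as NCHW, while TensorFlow defaults to (h, w, in, out) and NHWC, so both weights and data must be transposed. A minimal sketch of one stage under those assumptions (illustrative only, not the code from caffe_to_tensorflow.py):

    import numpy as np
    import tensorflow as tf

    def conv_relu(x, caffe_w, caffe_b):
        # caffe_w has Caffe layout (out, in, h, w); TensorFlow's conv2d
        # expects (h, w, in, out), hence the transpose.
        w = tf.constant(caffe_w.transpose(2, 3, 1, 0))
        # pad: 1 with a 3x3 kernel and stride 1 keeps H and W fixed,
        # i.e. 'SAME' padding in TensorFlow terms.
        conv = tf.nn.conv2d(x, w, strides=[1, 1, 1, 1], padding="SAME")
        return tf.nn.relu(tf.nn.bias_add(conv, tf.constant(caffe_b)))

    def max_pool_2x2(x):
        # pool: MAX, kernel_size: 2, stride: 2.
        return tf.nn.max_pool(x, ksize=[1, 2, 2, 1],
                              strides=[1, 2, 2, 1], padding="SAME")

    # Stage 1 on an NHWC batch (Caffe's 10x3x224x224 becomes 10x224x224x3).
    x = tf.placeholder(tf.float32, [10, 224, 224, 3])
    w1 = np.zeros((64, 3, 3, 3), np.float32)   # stand-ins for real weights
    b1 = np.zeros(64, np.float32)
    net = max_pool_2x2(conv_relu(x, w1, b1))   # after pool1: 10x112x112x64

Note also that fc6 consumes pool5 flattened: after five 2x2 pools, 224 / 2^5 = 7, so fc6 maps 7 * 7 * 512 = 25088 inputs to 4096 outputs, and a converter has to flatten in the same channel-major order Caffe uses rather than TensorFlow's default NHWC reshape order.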