nvidia@tegra-ubuntu:~/caffe$ tools/caffe time --model=models/bvlc_alexnet/deploy.prototxt --gpu=0
I1012 16:57:37.385556 1308 caffe.cpp:470] This is NVCaffe 0.16.4 started at Thu Oct 12 16:57:36 2017
I1012 16:57:37.386247 1308 caffe.cpp:473] CuDNN version: 6021
I1012 16:57:37.386267 1308 caffe.cpp:474] CuBLAS version: 8000
I1012 16:57:37.386340 1308 caffe.cpp:475] CUDA version: 8000
I1012 16:57:37.386405 1308 caffe.cpp:476] CUDA driver version: 8000
I1012 16:57:37.487474 1308 gpu_memory.cpp:159] GPUMemory::Manager initialized with Caching (CUB) GPU Allocator
I1012 16:57:37.487531 1308 gpu_memory.cpp:161] Total memory: 4174815232, Free: 855588864, dev_info[0]: total=4174815232 free=855588864
I1012 16:57:37.487682 1308 caffe.cpp:345] Use GPU with device ID 0
I1012 16:57:37.487723 1308 caffe.cpp:349] GPU 0: NVIDIA Tegra X1
I1012 16:57:37.497037 1308 solver.cpp:41] Solver data type: FLOAT
I1012 16:57:37.497083 1308 solver.cpp:44] Initializing solver from parameters:
test_interval: 51
base_lr: 0.01
display: 0
max_iter: 5
lr_policy: "fixed"
random_seed: 1371
net_param {
name: "AlexNet"
layer { name: "data" type: "Input" top: "data" input_param { shape { dim: 10 dim: 3 dim: 227 dim: 227 } } }
layer { name: "conv1" type: "Convolution" bottom: "data" top: "conv1" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } convolution_param { num_output: 96 kernel_size: 11 stride: 4 } }
layer { name: "relu1" type: "ReLU" bottom: "conv1" top: "conv1" }
layer { name: "norm1" type: "LRN" bottom: "conv1" top: "norm1" lrn_param { local_size: 5 alpha: 0.0001 beta: 0.75 } }
layer { name: "pool1" type: "Pooling" bottom: "norm1" top: "pool1" pooling_param { pool: MAX kernel_size: 3 stride: 2 } }
layer { name: "conv2" type: "Convolution" bottom: "pool1" top: "conv2" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } convolution_param { num_output: 256 pad: 2 kernel_size: 5 group: 2 } }
layer { name: "relu2" type: "ReLU" bottom: "conv2" top: "conv2" }
layer { name: "norm2" type: "LRN" bottom: "conv2" top: "norm2" lrn_param { local_size: 5 alpha: 0.0001 beta: 0.75 } }
layer { name: "pool2" type: "Pooling" bottom: "norm2" top: "pool2" pooling_param { pool: MAX kernel_size: 3 stride: 2 } }
layer { name: "conv3" type: "Convolution" bottom: "pool2" top: "conv3" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } convolution_param { num_output: 384 pad: 1 kernel_size: 3 } }
layer { name: "relu3" type: "ReLU" bottom: "conv3" top: "conv3" }
layer { name: "conv4" type: "Convolution" bottom: "conv3" top: "conv4" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } convolution_param { num_output: 384 pad: 1 kernel_size: 3 group: 2 } }
layer { name: "relu4" type: "ReLU" bottom: "conv4" top: "conv4" }
layer { name: "conv5" type: "Convolution" bottom: "conv4" top: "conv5" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } convolution_param { num_output: 256 pad: 1 kernel_size: 3 group: 2 } }
layer { name: "relu5" type: "ReLU" bottom: "conv5" top: "conv5" }
layer { name: "pool5" type: "Pooling" bottom: "conv5" top: "pool5" pooling_param { pool: MAX kernel_size: 3 stride: 2 } }
layer { name: "fc6" type: "InnerProduct" bottom: "pool5" top: "fc6" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } inner_product_param { num_output: 4096 } }
layer { name: "relu6" type: "ReLU" bottom: "fc6" top: "fc6" }
layer { name: "drop6" type: "Dropout" bottom: "fc6" top: "fc6" dropout_param { dropout_ratio: 0.5 } }
layer { name: "fc7" type: "InnerProduct" bottom: "fc6" top: "fc7" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } inner_product_param { num_output: 4096 } }
layer { name: "relu7" type: "ReLU" bottom: "fc7" top: "fc7" }
layer { name: "drop7" type: "Dropout" bottom: "fc7" top: "fc7" dropout_param { dropout_ratio: 0.5 } }
layer { name: "fc8" type: "InnerProduct" bottom: "fc7" top: "fc8" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } inner_product_param { num_output: 1000 } }
layer { name: "prob" type: "Softmax" bottom: "fc8" top: "prob" }
}
snapshot_after_train: false
I1012 16:57:37.613420 1308 solver.cpp:80] Creating training net specified in net_param.
I1012 16:57:37.613777 1308 net.cpp:70] Initializing net from parameters:
name: "AlexNet"
state { phase: TRAIN }
layer { name: "data" type: "Input" top: "data" input_param { shape { dim: 10 dim: 3 dim: 227 dim: 227 } } }
layer { name: "conv1" type: "Convolution" bottom: "data" top: "conv1" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } convolution_param { num_output: 96 kernel_size: 11 stride: 4 } }
layer { name: "relu1" type: "ReLU" bottom: "conv1" top: "conv1" }
layer { name: "norm1" type: "LRN" bottom: "conv1" top: "norm1" lrn_param { local_size: 5 alpha: 0.0001 beta: 0.75 } }
layer { name: "pool1" type: "Pooling" bottom: "norm1" top: "pool1" pooling_param { pool: MAX kernel_size: 3 stride: 2 } }
layer { name: "conv2" type: "Convolution" bottom: "pool1" top: "conv2" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } convolution_param { num_output: 256 pad: 2 kernel_size: 5 group: 2 } }
layer { name: "relu2" type: "ReLU" bottom: "conv2" top: "conv2" }
layer { name: "norm2" type: "LRN" bottom: "conv2" top: "norm2" lrn_param { local_size: 5 alpha: 0.0001 beta: 0.75 } }
layer { name: "pool2" type: "Pooling" bottom: "norm2" top: "pool2" pooling_param { pool: MAX kernel_size: 3 stride: 2 } }
layer { name: "conv3" type: "Convolution" bottom: "pool2" top: "conv3" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } convolution_param { num_output: 384 pad: 1 kernel_size: 3 } }
layer { name: "relu3" type: "ReLU" bottom: "conv3" top: "conv3" }
layer { name: "conv4" type: "Convolution" bottom: "conv3" top: "conv4" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } convolution_param { num_output: 384 pad: 1 kernel_size: 3 group: 2 } }
layer { name: "relu4" type: "ReLU" bottom: "conv4" top: "conv4" }
layer { name: "conv5" type: "Convolution" bottom: "conv4" top: "conv5" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } convolution_param { num_output: 256 pad: 1 kernel_size: 3 group: 2 } }
layer { name: "relu5" type: "ReLU" bottom: "conv5" top: "conv5" }
layer { name: "pool5" type: "Pooling" bottom: "conv5" top: "pool5" pooling_param { pool: MAX kernel_size: 3 stride: 2 } }
layer { name: "fc6" type: "InnerProduct" bottom: "pool5" top: "fc6" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } inner_product_param { num_output: 4096 } }
layer { name: "relu6" type: "ReLU" bottom: "fc6" top: "fc6" }
layer { name: "drop6" type: "Dropout" bottom: "fc6" top: "fc6" dropout_param { dropout_ratio: 0.5 } }
layer { name: "fc7" type: "InnerProduct" bottom: "fc6" top: "fc7" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } inner_product_param { num_output: 4096 } }
layer { name: "relu7" type: "ReLU" bottom: "fc7" top: "fc7" }
layer { name: "drop7" type: "Dropout" bottom: "fc7" top: "fc7" dropout_param { dropout_ratio: 0.5 } }
layer { name: "fc8" type: "InnerProduct" bottom: "fc7" top: "fc8" param { lr_mult: 1 decay_mult: 1 } param { lr_mult: 2 decay_mult: 0 } inner_product_param { num_output: 1000 } }
layer { name: "prob" type: "Softmax" bottom: "fc8" top: "prob" }
I1012 16:57:37.614578 1308 net.cpp:102] Using FLOAT as default forward math type
I1012 16:57:37.614608 1308 net.cpp:108] Using FLOAT as default backward math type
I1012 16:57:37.614620 1308 layer_factory.hpp:136] Creating layer 'data' of type 'Input'
I1012 16:57:37.614634 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:37.614665 1308 net.cpp:182] Created Layer data (0)
I1012 16:57:37.614683 1308 net.cpp:528] data -> data
I1012 16:57:37.615270 1308 net.cpp:243] Setting up data
I1012 16:57:37.615298 1308 net.cpp:250] TRAIN Top shape for layer 0 'data' 10 3 227 227 (1545870)
I1012 16:57:37.615339 1308 layer_factory.hpp:136] Creating layer 'conv1' of type 'Convolution'
I1012 16:57:37.615353 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:37.615411 1308 net.cpp:182] Created Layer conv1 (1)
I1012 16:57:37.615432 1308 net.cpp:559] conv1 <- data
I1012 16:57:37.615448 1308 net.cpp:528] conv1 -> conv1
I1012 16:57:39.091891 1308 net.cpp:243] Setting up conv1
I1012 16:57:39.091931 1308 net.cpp:250] TRAIN Top shape for layer 1 'conv1' 10 96 55 55 (2904000)
I1012 16:57:39.100736 1308 layer_factory.hpp:136] Creating layer 'relu1' of type 'ReLU'
I1012 16:57:39.100764 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.100793 1308 net.cpp:182] Created Layer relu1 (2)
I1012 16:57:39.100807 1308 net.cpp:559] relu1 <- conv1
I1012 16:57:39.100821 1308 net.cpp:511] relu1 -> conv1 (in-place)
I1012 16:57:39.100863 1308 net.cpp:243] Setting up relu1
I1012 16:57:39.100874 1308 net.cpp:250] TRAIN Top shape for layer 2 'relu1' 10 96 55 55 (2904000)
I1012 16:57:39.100893 1308 layer_factory.hpp:136] Creating layer 'norm1' of type 'LRN'
I1012 16:57:39.100904 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.100957 1308 net.cpp:182] Created Layer norm1 (3)
I1012 16:57:39.100971 1308 net.cpp:559] norm1 <- conv1
I1012 16:57:39.100983 1308 net.cpp:528] norm1 -> norm1
I1012 16:57:39.101164 1308 net.cpp:243] Setting up norm1
I1012 16:57:39.101182 1308 net.cpp:250] TRAIN Top shape for layer 3 'norm1' 10 96 55 55 (2904000)
I1012 16:57:39.101200 1308 layer_factory.hpp:136] Creating layer 'pool1' of type 'Pooling'
I1012 16:57:39.101213 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.101238 1308 net.cpp:182] Created Layer pool1 (4)
I1012 16:57:39.101249 1308 net.cpp:559] pool1 <- norm1
I1012 16:57:39.101260 1308 net.cpp:528] pool1 -> pool1
I1012 16:57:39.101424 1308 net.cpp:243] Setting up pool1
I1012 16:57:39.101438 1308 net.cpp:250] TRAIN Top shape for layer 4 'pool1' 10 96 27 27 (699840)
I1012 16:57:39.101457 1308 layer_factory.hpp:136] Creating layer 'conv2' of type 'Convolution'
I1012 16:57:39.101470 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.101502 1308 net.cpp:182] Created Layer conv2 (5)
I1012 16:57:39.101516 1308 net.cpp:559] conv2 <- pool1
I1012 16:57:39.101528 1308 net.cpp:528] conv2 -> conv2
I1012 16:57:39.104657 1308 net.cpp:243] Setting up conv2
I1012 16:57:39.104696 1308 net.cpp:250] TRAIN Top shape for layer 5 'conv2' 10 256 27 27 (1866240)
I1012 16:57:39.104801 1308 layer_factory.hpp:136] Creating layer 'relu2' of type 'ReLU'
I1012 16:57:39.104821 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.104847 1308 net.cpp:182] Created Layer relu2 (6)
I1012 16:57:39.104862 1308 net.cpp:559] relu2 <- conv2
I1012 16:57:39.104878 1308 net.cpp:511] relu2 -> conv2 (in-place)
I1012 16:57:39.104902 1308 net.cpp:243] Setting up relu2
I1012 16:57:39.104913 1308 net.cpp:250] TRAIN Top shape for layer 6 'relu2' 10 256 27 27 (1866240)
I1012 16:57:39.104933 1308 layer_factory.hpp:136] Creating layer 'norm2' of type 'LRN'
I1012 16:57:39.104944 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.104971 1308 net.cpp:182] Created Layer norm2 (7)
I1012 16:57:39.104984 1308 net.cpp:559] norm2 <- conv2
I1012 16:57:39.104996 1308 net.cpp:528] norm2 -> norm2
I1012 16:57:39.105198 1308 net.cpp:243] Setting up norm2
I1012 16:57:39.105221 1308 net.cpp:250] TRAIN Top shape for layer 7 'norm2' 10 256 27 27 (1866240)
I1012 16:57:39.105242 1308 layer_factory.hpp:136] Creating layer 'pool2' of type 'Pooling'
I1012 16:57:39.105253 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.105280 1308 net.cpp:182] Created Layer pool2 (8)
I1012 16:57:39.105296 1308 net.cpp:559] pool2 <- norm2
I1012 16:57:39.105314 1308 net.cpp:528] pool2 -> pool2
I1012 16:57:39.105492 1308 net.cpp:243] Setting up pool2
I1012 16:57:39.105515 1308 net.cpp:250] TRAIN Top shape for layer 8 'pool2' 10 256 13 13 (432640)
I1012 16:57:39.105540 1308 layer_factory.hpp:136] Creating layer 'conv3' of type 'Convolution'
I1012 16:57:39.105554 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.105590 1308 net.cpp:182] Created Layer conv3 (9)
I1012 16:57:39.105605 1308 net.cpp:559] conv3 <- pool2
I1012 16:57:39.105621 1308 net.cpp:528] conv3 -> conv3
I1012 16:57:39.112797 1308 net.cpp:243] Setting up conv3
I1012 16:57:39.112833 1308 net.cpp:250] TRAIN Top shape for layer 9 'conv3' 10 384 13 13 (648960)
I1012 16:57:39.112877 1308 layer_factory.hpp:136] Creating layer 'relu3' of type 'ReLU'
I1012 16:57:39.112893 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.112918 1308 net.cpp:182] Created Layer relu3 (10)
I1012 16:57:39.112931 1308 net.cpp:559] relu3 <- conv3
I1012 16:57:39.112946 1308 net.cpp:511] relu3 -> conv3 (in-place)
I1012 16:57:39.112967 1308 net.cpp:243] Setting up relu3
I1012 16:57:39.112977 1308 net.cpp:250] TRAIN Top shape for layer 10 'relu3' 10 384 13 13 (648960)
I1012 16:57:39.112993 1308 layer_factory.hpp:136] Creating layer 'conv4' of type 'Convolution'
I1012 16:57:39.113005 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.113036 1308 net.cpp:182] Created Layer conv4 (11)
I1012 16:57:39.113049 1308 net.cpp:559] conv4 <- conv3
I1012 16:57:39.113060 1308 net.cpp:528] conv4 -> conv4
I1012 16:57:39.116678 1308 net.cpp:243] Setting up conv4
I1012 16:57:39.116708 1308 net.cpp:250] TRAIN Top shape for layer 11 'conv4' 10 384 13 13 (648960)
I1012 16:57:39.116741 1308 layer_factory.hpp:136] Creating layer 'relu4' of type 'ReLU'
I1012 16:57:39.116755 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.116775 1308 net.cpp:182] Created Layer relu4 (12)
I1012 16:57:39.116787 1308 net.cpp:559] relu4 <- conv4
I1012 16:57:39.116801 1308 net.cpp:511] relu4 -> conv4 (in-place)
I1012 16:57:39.116819 1308 net.cpp:243] Setting up relu4
I1012 16:57:39.116829 1308 net.cpp:250] TRAIN Top shape for layer 12 'relu4' 10 384 13 13 (648960)
I1012 16:57:39.116844 1308 layer_factory.hpp:136] Creating layer 'conv5' of type 'Convolution'
I1012 16:57:39.116854 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.116935 1308 net.cpp:182] Created Layer conv5 (13)
I1012 16:57:39.116948 1308 net.cpp:559] conv5 <- conv4
I1012 16:57:39.116961 1308 net.cpp:528] conv5 -> conv5
I1012 16:57:39.119928 1308 net.cpp:243] Setting up conv5
I1012 16:57:39.119964 1308 net.cpp:250] TRAIN Top shape for layer 13 'conv5' 10 256 13 13 (432640)
I1012 16:57:39.120010 1308 layer_factory.hpp:136] Creating layer 'relu5' of type 'ReLU'
I1012 16:57:39.120028 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.120051 1308 net.cpp:182] Created Layer relu5 (14)
I1012 16:57:39.120066 1308 net.cpp:559] relu5 <- conv5
I1012 16:57:39.120081 1308 net.cpp:511] relu5 -> conv5 (in-place)
I1012 16:57:39.120103 1308 net.cpp:243] Setting up relu5
I1012 16:57:39.120115 1308 net.cpp:250] TRAIN Top shape for layer 14 'relu5' 10 256 13 13 (432640)
I1012 16:57:39.120132 1308 layer_factory.hpp:136] Creating layer 'pool5' of type 'Pooling'
I1012 16:57:39.120146 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.120167 1308 net.cpp:182] Created Layer pool5 (15)
I1012 16:57:39.120180 1308 net.cpp:559] pool5 <- conv5
I1012 16:57:39.120194 1308 net.cpp:528] pool5 -> pool5
I1012 16:57:39.120398 1308 net.cpp:243] Setting up pool5
I1012 16:57:39.120417 1308 net.cpp:250] TRAIN Top shape for layer 15 'pool5' 10 256 6 6 (92160)
I1012 16:57:39.120436 1308 layer_factory.hpp:136] Creating layer 'fc6' of type 'InnerProduct'
I1012 16:57:39.120450 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.120478 1308 net.cpp:182] Created Layer fc6 (16)
I1012 16:57:39.120492 1308 net.cpp:559] fc6 <- pool5
I1012 16:57:39.120507 1308 net.cpp:528] fc6 -> fc6
I1012 16:57:39.245911 1308 net.cpp:243] Setting up fc6
I1012 16:57:39.245949 1308 net.cpp:250] TRAIN Top shape for layer 16 'fc6' 10 4096 (40960)
I1012 16:57:39.245988 1308 layer_factory.hpp:136] Creating layer 'relu6' of type 'ReLU'
I1012 16:57:39.246006 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.246031 1308 net.cpp:182] Created Layer relu6 (17)
I1012 16:57:39.246047 1308 net.cpp:559] relu6 <- fc6
I1012 16:57:39.246062 1308 net.cpp:511] relu6 -> fc6 (in-place)
I1012 16:57:39.246083 1308 net.cpp:243] Setting up relu6
I1012 16:57:39.246095 1308 net.cpp:250] TRAIN Top shape for layer 17 'relu6' 10 4096 (40960)
I1012 16:57:39.246111 1308 layer_factory.hpp:136] Creating layer 'drop6' of type 'Dropout'
I1012 16:57:39.246125 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.246160 1308 net.cpp:182] Created Layer drop6 (18)
I1012 16:57:39.246172 1308 net.cpp:559] drop6 <- fc6
I1012 16:57:39.246184 1308 net.cpp:511] drop6 -> fc6 (in-place)
I1012 16:57:39.267174 1308 net.cpp:243] Setting up drop6
I1012 16:57:39.267215 1308 net.cpp:250] TRAIN Top shape for layer 18 'drop6' 10 4096 (40960)
I1012 16:57:39.267244 1308 layer_factory.hpp:136] Creating layer 'fc7' of type 'InnerProduct'
I1012 16:57:39.267258 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:39.267285 1308 net.cpp:182] Created Layer fc7 (19)
I1012 16:57:39.267298 1308 net.cpp:559] fc7 <- fc6
I1012 16:57:39.267313 1308 net.cpp:528] fc7 -> fc7
I1012 16:57:40.232806 1308 net.cpp:243] Setting up fc7
I1012 16:57:40.233053 1308 net.cpp:250] TRAIN Top shape for layer 19 'fc7' 10 4096 (40960)
I1012 16:57:40.233150 1308 layer_factory.hpp:136] Creating layer 'relu7' of type 'ReLU'
I1012 16:57:40.233224 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:40.233302 1308 net.cpp:182] Created Layer relu7 (20)
I1012 16:57:40.233371 1308 net.cpp:559] relu7 <- fc7
I1012 16:57:40.233440 1308 net.cpp:511] relu7 -> fc7 (in-place)
I1012 16:57:40.233518 1308 net.cpp:243] Setting up relu7
I1012 16:57:40.233587 1308 net.cpp:250] TRAIN Top shape for layer 20 'relu7' 10 4096 (40960)
I1012 16:57:40.233768 1308 layer_factory.hpp:136] Creating layer 'drop7' of type 'Dropout'
I1012 16:57:40.233844 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:40.233927 1308 net.cpp:182] Created Layer drop7 (21)
I1012 16:57:40.234005 1308 net.cpp:559] drop7 <- fc7
I1012 16:57:40.234077 1308 net.cpp:511] drop7 -> fc7 (in-place)
I1012 16:57:41.882411 1308 net.cpp:243] Setting up drop7
I1012 16:57:41.882452 1308 net.cpp:250] TRAIN Top shape for layer 21 'drop7' 10 4096 (40960)
I1012 16:57:41.882488 1308 layer_factory.hpp:136] Creating layer 'fc8' of type 'InnerProduct'
I1012 16:57:41.882506 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:41.882537 1308 net.cpp:182] Created Layer fc8 (22)
I1012 16:57:41.882555 1308 net.cpp:559] fc8 <- fc7
I1012 16:57:41.882573 1308 net.cpp:528] fc8 -> fc8
I1012 16:57:41.921139 1308 net.cpp:243] Setting up fc8
I1012 16:57:41.921176 1308 net.cpp:250] TRAIN Top shape for layer 22 'fc8' 10 1000 (10000)
I1012 16:57:41.921223 1308 layer_factory.hpp:136] Creating layer 'prob' of type 'Softmax'
I1012 16:57:41.921244 1308 layer_factory.hpp:148] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I1012 16:57:41.921294 1308 net.cpp:182] Created Layer prob (23)
I1012 16:57:41.921314 1308 net.cpp:559] prob <- fc8
I1012 16:57:41.921331 1308 net.cpp:528] prob -> prob
I1012 16:57:41.921720 1308 net.cpp:243] Setting up prob
I1012 16:57:41.921746 1308 net.cpp:250] TRAIN Top shape for layer 23 'prob' 10 1000 (10000)
I1012 16:57:41.921772 1308 net.cpp:323] prob does not need backward computation.
I1012 16:57:41.921787 1308 net.cpp:323] fc8 does not need backward computation.
I1012 16:57:41.921800 1308 net.cpp:323] drop7 does not need backward computation.
I1012 16:57:41.921813 1308 net.cpp:323] relu7 does not need backward computation.
I1012 16:57:41.921824 1308 net.cpp:323] fc7 does not need backward computation.
I1012 16:57:41.921836 1308 net.cpp:323] drop6 does not need backward computation.
I1012 16:57:41.921847 1308 net.cpp:323] relu6 does not need backward computation.
I1012 16:57:41.921859 1308 net.cpp:323] fc6 does not need backward computation.
I1012 16:57:41.921870 1308 net.cpp:323] pool5 does not need backward computation.
I1012 16:57:41.921881 1308 net.cpp:323] relu5 does not need backward computation.
I1012 16:57:41.921893 1308 net.cpp:323] conv5 does not need backward computation.
I1012 16:57:41.921905 1308 net.cpp:323] relu4 does not need backward computation.
I1012 16:57:41.921916 1308 net.cpp:323] conv4 does not need backward computation.
I1012 16:57:41.921927 1308 net.cpp:323] relu3 does not need backward computation.
I1012 16:57:41.921938 1308 net.cpp:323] conv3 does not need backward computation.
I1012 16:57:41.921952 1308 net.cpp:323] pool2 does not need backward computation.
I1012 16:57:41.921962 1308 net.cpp:323] norm2 does not need backward computation.
I1012 16:57:41.921974 1308 net.cpp:323] relu2 does not need backward computation.
I1012 16:57:41.921985 1308 net.cpp:323] conv2 does not need backward computation.
I1012 16:57:41.921998 1308 net.cpp:323] pool1 does not need backward computation.
I1012 16:57:41.922009 1308 net.cpp:323] norm1 does not need backward computation.
I1012 16:57:41.922020 1308 net.cpp:323] relu1 does not need backward computation.
I1012 16:57:41.922032 1308 net.cpp:323] conv1 does not need backward computation.
I1012 16:57:41.922044 1308 net.cpp:323] data does not need backward computation.
I1012 16:57:41.922055 1308 net.cpp:365] This network produces output prob
I1012 16:57:41.922112 1308 net.cpp:387] Top memory (TRAIN) required for data: 83232440 diff: 83232440
I1012 16:57:41.922127 1308 net.cpp:390] Bottom memory (TRAIN) required for data: 83192440 diff: 83192440
I1012 16:57:41.922137 1308 net.cpp:393] Shared (in-place) memory (TRAIN) by data: 26658560 diff: 26658560
I1012 16:57:41.922148 1308 net.cpp:396] Parameters memory (TRAIN) required for data: 243860896 diff: 243860896
I1012 16:57:41.922221 1308 net.cpp:399] Parameters shared memory (TRAIN) by data: 0 diff: 0
I1012 16:57:41.922235 1308 net.cpp:405] Network initialization done.
I1012 16:57:41.922416 1308 solver.cpp:55] Solver scaffolding done.
I1012 16:57:41.924718 1308 caffe.cpp:378] Initialization for 5 iterations.
I1012 16:57:41.924752 1308 caffe.cpp:381] Performing initial Forward/Backward
I1012 16:57:51.851292 1308 net.cpp:1358] [0] Reserving 243861248 bytes of shared learnable space
I1012 16:57:58.564569 1308 solver.cpp:252] Initial Test completed
I1012 16:58:32.052376 1308 cudnn_conv_layer.cpp:874] [0] Conv Algos (F,BD,BF): 'conv1' with space 0.23G 3/1 1 0 3 (avail 0.32G, req 0G) t: 0 18.18 9.1
I1012 16:58:40.198628 1308 cudnn_conv_layer.cpp:874] [0] Conv Algos (F,BD,BF): 'conv2' with space 0.28G 96/2 7 5 5 (avail 0.31G, req 0.04G) t: 0 4.62 4.41
I1012 16:58:41.997225 1308 cudnn_conv_layer.cpp:874] [0] Conv Algos (F,BD,BF): 'conv3' with space 0.28G 256/1 1 5 5 (avail 0.31G, req 0.04G) t: 0 7.59 5.98
I1012 16:58:43.289158 1308 cudnn_conv_layer.cpp:874] [0] Conv Algos (F,BD,BF): 'conv4' with space 0.28G 384/2 1 1 5 (avail 0.32G, req 0.04G) t: 0 3.4 2.59
I1012 16:58:45.030541 1308 cudnn_conv_layer.cpp:874] [0] Conv Algos (F,BD,BF): 'conv5' with space 0.29G 384/2 1 1 5 (avail 0.33G, req 0.04G) t: 0 2.26 1.88
I1012 16:58:45.323537 1308 caffe.cpp:389] Initial Forward/Backward complete
I1012 16:58:45.323606 1308 caffe.cpp:390] Average Initialization Forward/Backward pass: 12679.8 ms.
I1012 16:58:45.323659 1308 caffe.cpp:393] *** Benchmark begins ***
I1012 16:58:45.323679 1308 caffe.cpp:394] Testing for 50 iterations.
I1012 16:58:49.616600 1308 caffe.cpp:420] Iteration: 1 forward-backward time: 4292.89 ms.
I1012 16:58:49.779598 1308 caffe.cpp:420] Iteration: 2 forward-backward time: 162.844 ms.
I1012 16:58:49.939838 1308 caffe.cpp:420] Iteration: 3 forward-backward time: 160.15 ms.
I1012 16:58:50.032860 1308 cudnn_conv_layer.cpp:476] [0] Layer 'conv1' reallocating workspace 0.29G to 0.08G
I1012 16:58:50.244724 1308 caffe.cpp:420] Iteration: 4 forward-backward time: 304.798 ms.
I1012 16:58:50.407826 1308 caffe.cpp:420] Iteration: 5 forward-backward time: 162.991 ms.
I1012 16:58:50.573175 1308 caffe.cpp:420] Iteration: 6 forward-backward time: 165.27 ms.
I1012 16:58:50.736397 1308 caffe.cpp:420] Iteration: 7 forward-backward time: 163.14 ms.
I1012 16:58:50.896262 1308 caffe.cpp:420] Iteration: 8 forward-backward time: 159.781 ms.
I1012 16:58:51.052738 1308 caffe.cpp:420] Iteration: 9 forward-backward time: 156.396 ms.
I1012 16:58:51.209390 1308 caffe.cpp:420] Iteration: 10 forward-backward time: 156.574 ms.
I1012 16:58:51.370856 1308 caffe.cpp:420] Iteration: 11 forward-backward time: 161.381 ms.
I1012 16:58:51.531365 1308 caffe.cpp:420] Iteration: 12 forward-backward time: 160.426 ms.
I1012 16:58:51.692194 1308 caffe.cpp:420] Iteration: 13 forward-backward time: 160.749 ms.
I1012 16:58:51.854077 1308 caffe.cpp:420] Iteration: 14 forward-backward time: 161.799 ms.
I1012 16:58:52.011871 1308 caffe.cpp:420] Iteration: 15 forward-backward time: 157.711 ms.
I1012 16:58:52.170927 1308 caffe.cpp:420] Iteration: 16 forward-backward time: 158.977 ms.
I1012 16:58:52.328317 1308 caffe.cpp:420] Iteration: 17 forward-backward time: 157.303 ms.
I1012 16:58:52.490939 1308 caffe.cpp:420] Iteration: 18 forward-backward time: 162.539 ms.
I1012 16:58:52.654081 1308 caffe.cpp:420] Iteration: 19 forward-backward time: 163.053 ms.
I1012 16:58:52.811235 1308 caffe.cpp:420] Iteration: 20 forward-backward time: 157.067 ms.
I1012 16:58:52.968307 1308 caffe.cpp:420] Iteration: 21 forward-backward time: 156.981 ms.
I1012 16:58:53.127748 1308 caffe.cpp:420] Iteration: 22 forward-backward time: 159.365 ms.
I1012 16:58:53.291807 1308 caffe.cpp:420] Iteration: 23 forward-backward time: 163.977 ms.
I1012 16:58:53.453835 1308 caffe.cpp:420] Iteration: 24 forward-backward time: 161.946 ms.
I1012 16:58:53.612182 1308 caffe.cpp:420] Iteration: 25 forward-backward time: 158.267 ms.
I1012 16:58:53.774389 1308 caffe.cpp:420] Iteration: 26 forward-backward time: 162.128 ms.
I1012 16:58:53.940058 1308 caffe.cpp:420] Iteration: 27 forward-backward time: 165.586 ms.
I1012 16:58:54.102669 1308 caffe.cpp:420] Iteration: 28 forward-backward time: 162.525 ms.
I1012 16:58:54.261713 1308 caffe.cpp:420] Iteration: 29 forward-backward time: 158.965 ms.
I1012 16:58:54.417552 1308 caffe.cpp:420] Iteration: 30 forward-backward time: 155.756 ms.
I1012 16:58:54.580036 1308 caffe.cpp:420] Iteration: 31 forward-backward time: 162.407 ms.
I1012 16:58:54.744354 1308 caffe.cpp:420] Iteration: 32 forward-backward time: 164.219 ms.
I1012 16:58:54.903772 1308 caffe.cpp:420] Iteration: 33 forward-backward time: 159.324 ms.
I1012 16:58:55.066742 1308 caffe.cpp:420] Iteration: 34 forward-backward time: 162.873 ms.
I1012 16:58:55.231147 1308 caffe.cpp:420] Iteration: 35 forward-backward time: 160.534 ms.
I1012 16:58:55.390959 1308 caffe.cpp:420] Iteration: 36 forward-backward time: 159.73 ms.
I1012 16:58:55.556856 1308 caffe.cpp:420] Iteration: 37 forward-backward time: 165.818 ms.
I1012 16:58:55.726534 1308 caffe.cpp:420] Iteration: 38 forward-backward time: 169.599 ms.
I1012 16:58:55.889091 1308 caffe.cpp:420] Iteration: 39 forward-backward time: 162.471 ms.
I1012 16:58:56.052461 1308 caffe.cpp:420] Iteration: 40 forward-backward time: 163.288 ms.
I1012 16:58:56.213708 1308 caffe.cpp:420] Iteration: 41 forward-backward time: 161.171 ms.
I1012 16:58:56.380609 1308 caffe.cpp:420] Iteration: 42 forward-backward time: 166.814 ms.
I1012 16:58:56.543259 1308 caffe.cpp:420] Iteration: 43 forward-backward time: 162.57 ms.
I1012 16:58:56.708200 1308 caffe.cpp:420] Iteration: 44 forward-backward time: 164.869 ms.
I1012 16:58:56.879235 1308 caffe.cpp:420] Iteration: 45 forward-backward time: 170.94 ms.
I1012 16:58:57.039705 1308 caffe.cpp:420] Iteration: 46 forward-backward time: 160.382 ms.
I1012 16:58:57.202849 1308 caffe.cpp:420] Iteration: 47 forward-backward time: 163.068 ms.
I1012 16:58:57.360524 1308 caffe.cpp:420] Iteration: 48 forward-backward time: 157.592 ms.
I1012 16:58:57.534188 1308 caffe.cpp:420] Iteration: 49 forward-backward time: 173.55 ms.
I1012 16:58:57.695271 1308 caffe.cpp:420] Iteration: 50 forward-backward time: 160.969 ms.
I1012 16:58:57.695392 1308 caffe.cpp:423] Average time per layer:
I1012 16:58:57.695415 1308 caffe.cpp:426] data forward: 0.0237094 ms.
I1012 16:58:57.695439 1308 caffe.cpp:429] data backward: 0.0206136 ms.
I1012 16:58:57.695464 1308 caffe.cpp:426] conv1 forward: 12.018 ms.
I1012 16:58:57.695487 1308 caffe.cpp:429] conv1 backward: 10.4293 ms.
I1012 16:58:57.695510 1308 caffe.cpp:426] relu1 forward: 1.5581 ms.
I1012 16:58:57.695530 1308 caffe.cpp:429] relu1 backward: 0.0203073 ms.
I1012 16:58:57.695551 1308 caffe.cpp:426] norm1 forward: 1.62364 ms.
I1012 16:58:57.695574 1308 caffe.cpp:429] norm1 backward: 67.8034 ms.
I1012 16:58:57.695601 1308 caffe.cpp:426] pool1 forward: 1.98889 ms.
I1012 16:58:57.695624 1308 caffe.cpp:429] pool1 backward: 0.0208448 ms.
I1012 16:58:57.695647 1308 caffe.cpp:426] conv2 forward: 16.0531 ms.
I1012 16:58:57.695669 1308 caffe.cpp:429] conv2 backward: 10.0553 ms.
I1012 16:58:57.695696 1308 caffe.cpp:426] relu2 forward: 1.6879 ms.
I1012 16:58:57.695719 1308 caffe.cpp:429] relu2 backward: 0.0204355 ms.
I1012 16:58:57.695746 1308 caffe.cpp:426] norm2 forward: 1.47183 ms.
I1012 16:58:57.695770 1308 caffe.cpp:429] norm2 backward: 20.0707 ms.
I1012 16:58:57.695794 1308 caffe.cpp:426] pool2 forward: 1.69055 ms.
I1012 16:58:57.695816 1308 caffe.cpp:429] pool2 backward: 0.0209583 ms.
I1012 16:58:57.695837 1308 caffe.cpp:426] conv3 forward: 10.8848 ms.
I1012 16:58:57.695860 1308 caffe.cpp:429] conv3 backward: 6.60752 ms.
I1012 16:58:57.695886 1308 caffe.cpp:426] relu3 forward: 0.679425 ms.
I1012 16:58:57.695910 1308 caffe.cpp:429] relu3 backward: 0.0214624 ms.
I1012 16:58:57.695931 1308 caffe.cpp:426] conv4 forward: 10.8118 ms.
I1012 16:58:57.695953 1308 caffe.cpp:429] conv4 backward: 5.7677 ms.
I1012 16:58:57.695976 1308 caffe.cpp:426] relu4 forward: 0.765893 ms.
I1012 16:58:57.696003 1308 caffe.cpp:429] relu4 backward: 0.0211448 ms.
I1012 16:58:57.696027 1308 caffe.cpp:426] conv5 forward: 5.21517 ms.
I1012 16:58:57.696053 1308 caffe.cpp:429] conv5 backward: 4.37746 ms.
I1012 16:58:57.696077 1308 caffe.cpp:426] relu5 forward: 0.469864 ms.
I1012 16:58:57.696099 1308 caffe.cpp:429] relu5 backward: 0.0195157 ms.
I1012 16:58:57.696121 1308 caffe.cpp:426] pool5 forward: 0.696802 ms.
I1012 16:58:57.696143 1308 caffe.cpp:429] pool5 backward: 0.0203343 ms.
I1012 16:58:57.696166 1308 caffe.cpp:426] fc6 forward: 11.4619 ms.
I1012 16:58:57.696247 1308 caffe.cpp:429] fc6 backward: 20.4895 ms.
I1012 16:58:57.696276 1308 caffe.cpp:426] relu6 forward: 0.205359 ms.
I1012 16:58:57.696302 1308 caffe.cpp:429] relu6 backward: 0.01905 ms.
I1012 16:58:57.696331 1308 caffe.cpp:426] drop6 forward: 0.223821 ms. I1012 16:58:57.696354 1308 caffe.cpp:429] drop6 backward: 0.0285229 ms. I1012 16:58:57.696379 1308 caffe.cpp:426] fc7 forward: 4.8605 ms. I1012 16:58:57.696401 1308 caffe.cpp:429] fc7 backward: 9.30679 ms. I1012 16:58:57.696424 1308 caffe.cpp:426] relu7 forward: 0.21133 ms. I1012 16:58:57.696447 1308 caffe.cpp:429] relu7 backward: 0.0201115 ms. I1012 16:58:57.696475 1308 caffe.cpp:426] drop7 forward: 0.25403 ms. I1012 16:58:57.696498 1308 caffe.cpp:429] drop7 backward: 0.088653 ms. I1012 16:58:57.696521 1308 caffe.cpp:426] fc8 forward: 1.76348 ms. I1012 16:58:57.696543 1308 caffe.cpp:429] fc8 backward: 2.70025 ms. I1012 16:58:57.696564 1308 caffe.cpp:426] prob forward: 0.257567 ms. I1012 16:58:57.696586 1308 caffe.cpp:429] prob backward: 0.0245438 ms. I1012 16:58:57.696699 1308 caffe.cpp:434] Average Forward pass: 88.3707 ms. I1012 16:58:57.696732 1308 caffe.cpp:436] Average Backward pass: 158.784 ms. I1012 16:58:57.696754 1308 caffe.cpp:438] Average Forward-Backward: 247.46 ms. I1012 16:58:57.696784 1308 caffe.cpp:440] Total Time: 12373 ms. I1012 16:58:57.696811 1308 caffe.cpp:441] *** Benchmark ends ***