# resnet50_int8_full_conv.prototxt
# INT8-quantized ResNet-50 deployment definition (full-conv variant).
# NOTE: the source repository was archived by its owner on Aug 5, 2022 and is read-only.
# (Web-scrape artifacts — page chrome and the line-number gutter — removed so this
# file parses as valid prototxt; the network definition below is unmodified.)
name: "ResNet-50"
layer {
name: "data"
type: "DummyData"
top: "data"
dummy_data_param {
data_filler {
type: "constant"
value: 0.01
}
shape {
dim: 64
dim: 3
dim: 224
dim: 224
}
}
}
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
convolution_param {
num_output: 64
pad: 3
kernel_size: 7
stride: 2
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0.20000000298
}
}
quantization_param {
bw_layer_in: 8
bw_layer_out: 8
bw_params: 8
scale_in: 0.789830446243
scale_out: 8.12105941772
scale_params: 15080.0996094
scale_params: 99465.0859375
scale_params: 173115.5625
scale_params: 11128.5810547
scale_params: 14022.6171875
scale_params: 28568.96875
scale_params: 153647.71875
scale_params: 21552.6269531
scale_params: 23374.9042969
scale_params: 5475.21972656
scale_params: 20287.5507812
scale_params: 8116.69384766
scale_params: 8731.93066406
scale_params: 53525.2148438
scale_params: 15007.5283203
scale_params: 12752.8212891
scale_params: 37056.203125
scale_params: 58204.203125
scale_params: 6236.59960938
scale_params: 53042.1640625
scale_params: 12115.5107422
scale_params: 14664.6064453
scale_params: 50204.875
scale_params: 159775.84375
scale_params: 13167.9726562
scale_params: 41848.3125
scale_params: 54796.9609375
scale_params: 24042.1816406
scale_params: 14904.6445312
scale_params: 21783.1914062
scale_params: 23500.7773438
scale_params: 128000.28125
scale_params: 7732.32763672
scale_params: 36472.7929688
scale_params: 31271.7519531
scale_params: 250784.234375
scale_params: 9309.20507812
scale_params: 51890.1015625
scale_params: 29751.0058594
scale_params: 38078.7148438
scale_params: 17506.2539062
scale_params: 8578.92773438
scale_params: 25078.3964844
scale_params: 73881.96875
scale_params: 10138.1269531
scale_params: 81844.2109375
scale_params: 17890.7109375
scale_params: 17035.5859375
scale_params: 33184.5117188
scale_params: 10322.5791016
scale_params: 76040.84375
scale_params: 25666.6757812
scale_params: 32228.9179688
scale_params: 18662.8847656
scale_params: 46139.2304688
scale_params: 11391.8251953
scale_params: 81166.40625
scale_params: 9657.83007812
scale_params: 54063.6953125
scale_params: 8307.62109375
scale_params: 8215.99316406
scale_params: 68782.2421875
scale_params: 91589.109375
scale_params: 9809.52539062
is_negative_input: true
}
}
layer {
name: "bn_conv1"
type: "BatchNorm"
bottom: "conv1"
top: "conv1"
batch_norm_param {
}
}
layer {
name: "scale_conv1"
type: "Scale"
bottom: "conv1"
top: "conv1"
scale_param {
bias_term: true
}
}
layer {
name: "conv1_relu"
type: "ReLU"
bottom: "conv1"
top: "conv1"
relu_param {
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "res2a_branch1"
type: "Convolution"
bottom: "pool1"
top: "res2a_branch1"
convolution_param {
num_output: 256
bias_term: false
pad: 0
kernel_size: 1
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0.20000000298
}
}
quantization_param {
bw_layer_in: 8
bw_layer_out: 8
bw_params: 8
scale_in: 8.12105941772
scale_out: 7.73090314865
scale_params: 3828.66894531
scale_params: 618.321228027
scale_params: 587.330749512
scale_params: 1369.48376465
scale_params: 814.807434082
scale_params: 1789.86340332
scale_params: 920.71697998
scale_params: 199.799423218
scale_params: 1450.02331543
scale_params: 575.434082031
scale_params: 384.107727051
scale_params: 3843.9753418
scale_params: 577.203063965
scale_params: 420.648864746
scale_params: 234.671737671
scale_params: 543.028076172
scale_params: 573.451843262
scale_params: 329.976257324
scale_params: 514.514404297
scale_params: 1706.61987305
scale_params: 1049.72265625
scale_params: 489.925231934
scale_params: 273.531463623
scale_params: 146.749908447
scale_params: 261.734436035
scale_params: 526.024902344
scale_params: 537.212036133
scale_params: 714.500671387
scale_params: 238.242721558
scale_params: 1328.52355957
scale_params: 186.795394897
scale_params: 3482.09008789
scale_params: 563.324157715
scale_params: 311.20526123
scale_params: 272.513427734
scale_params: 376.676452637
scale_params: 1690.90429688
scale_params: 371.645782471
scale_params: 319.967376709
scale_params: 1471.15222168
scale_params: 177.70149231
scale_params: 369.58996582
scale_params: 305.294586182
scale_params: 441.888916016
scale_params: 1084.99304199
scale_params: 550.773498535
scale_params: 560.963134766
scale_params: 521.900756836
scale_params: 210.17338562
scale_params: 236.662399292
scale_params: 2190.15039062
scale_params: 821.173461914
scale_params: 775.838989258
scale_params: 626.157287598
scale_params: 412.893707275
scale_params: 322.104705811
scale_params: 409.800018311
scale_params: 390.28527832
scale_params: 1059.69628906
scale_params: 340.560699463
scale_params: 255.07824707
scale_params: 1060.39343262
scale_params: 716.245422363
scale_params: 1808.01953125
scale_params: 178.31817627
scale_params: 385.139068604
scale_params: 169.164382935
scale_params: 443.104675293
scale_params: 947.276306152
scale_params: 295.337677002
scale_params: 562.836914062
scale_params: 1002.90093994
scale_params: 3527.50610352
scale_params: 399.81918335
scale_params: 483.519104004
scale_params: 8887.55175781
scale_params: 1260.28149414
scale_params: 117.425605774
scale_params: 253.14465332
scale_params: 123.947502136
scale_params: 432.127075195
scale_params: 1442.68371582
scale_params: 1017.52185059
scale_params: 244.547424316
scale_params: 403.563659668
scale_params: 499.992950439
scale_params: 564.49420166
scale_params: 246.912780762
scale_params: 512.768066406
scale_params: 967.212585449
scale_params: 158.648254395
scale_params: 420.730865479
scale_params: 211.38633728
scale_params: 416.336761475
scale_params: 698.515136719
scale_params: 314.289550781
scale_params: 553.191345215
scale_params: 1144.92443848
scale_params: 544.803649902
scale_params: 2448.44433594
scale_params: 654.004760742
scale_params: 666.644714355
scale_params: 1038.1973877
scale_params: 486.622131348
scale_params: 1200.72338867
scale_params: 95.9519882202
scale_params: 260.665679932
scale_params: 213.843002319
scale_params: 365.15246582
scale_params: 257.642883301
scale_params: 659.51385498
scale_params: 439.34564209
scale_params: 779.766540527
scale_params: 1673.7010498
scale_params: 429.212890625
scale_params: 537.288574219
scale_params: 203.874450684
scale_params: 801.684448242
scale_params: 1464.76245117
scale_params: 425.605194092
scale_params: 272.120513916
scale_params: 195.600418091
scale_params: 1187.5994873
scale_params: 109.027626038
scale_params: 158.489196777
scale_params: 506.32800293
scale_params: 142.486099243
scale_params: 414.021575928
scale_params: 393.532440186
scale_params: 318.269775391
scale_params: 1301.23205566
scale_params: 245.736206055
scale_params: 206.509613037
scale_params: 1103.89306641
scale_params: 277131.71875
scale_params: 788.98449707
scale_params: 322.844787598
scale_params: 560.177490234
scale_params: 498.093780518
scale_params: 772.19720459
scale_params: 1337.73632812
scale_params: 984.823364258
scale_params: 589.082214355
scale_params: 743.321594238
scale_params: 1245.66796875
scale_params: 212.157470703
scale_params: 500.03024292
scale_params: 103.530342102
scale_params: 1093.62963867
scale_params: 127.437927246
scale_params: 789.333190918
scale_params: 1030.48730469
scale_params: 131.424163818
scale_params: 1322.54785156
scale_params: 746.349487305
scale_params: 326.069732666
scale_params: 5814.01025391
scale_params: 5018.57666016
scale_params: 375.912353516
scale_params: 1040.49621582
scale_params: 1152.6027832
scale_params: 113.639846802
scale_params: 342.281768799
scale_params: 1272.02001953
scale_params: 268.378631592
scale_params: 353.730987549
scale_params: 428.639007568
scale_params: 427.328887939
scale_params: 528.016235352
scale_params: 271.881896973
scale_params: 618.630554199
scale_params: 363.260345459
scale_params: 812.010375977
scale_params: 333.443634033
scale_params: 1090.98632812
scale_params: 342.890258789
scale_params: 549.105651855
scale_params: 616.597961426
scale_params: 425.660644531
scale_params: 535.809753418
scale_params: 220.588012695
scale_params: 734.373413086
scale_params: 1452.54162598
scale_params: 2120.28930664
scale_params: 209.975296021
scale_params: 736.582824707
scale_params: 165.243743896
scale_params: 448.170440674
scale_params: 120.57673645
scale_params: 231.176269531
scale_params: 577.932312012
scale_params: 339.722930908
scale_params: 764.042907715
scale_params: 889.227478027
scale_params: 1363.8223877
scale_params: 451.16897583
scale_params: 1019.57049561
scale_params: 807.037414551
scale_params: 6748.09863281
scale_params: 352.978942871
scale_params: 474.225799561
scale_params: 205.063919067
scale_params: 3108.5012207
scale_params: 264.17401123
scale_params: 248.80809021
scale_params: 550.532470703
scale_params: 431.087310791
scale_params: 270.558410645
scale_params: 905.743835449
scale_params: 371.501831055
scale_params: 7559.11914062
scale_params: 795.854675293
scale_params: 657.826599121
scale_params: 751.862792969
scale_params: 341.940582275
scale_params: 438.733337402
scale_params: 9547.5859375
scale_params: 469.317138672
scale_params: 619.888671875
scale_params: 357.591552734
scale_params: 410.894897461
scale_params: 324.608642578
scale_params: 398.42755127
scale_params: 278.899932861
scale_params: 456.298126221
scale_params: 316.278015137
scale_params: 268.126190186
scale_params: 754.454650879
scale_params: 255.755935669
scale_params: 1370.49121094
scale_params: 993.676025391
scale_params: 319.8175354
scale_params: 849.072631836
scale_params: 3807.34008789
scale_params: 2119.58227539
scale_params: 313.335540771
scale_params: 793.53894043
scale_params: 527.812927246
scale_params: 349.45880127
scale_params: 549.562683105
scale_params: 478.788604736
scale_params: 1185.48901367
scale_params: 586.456726074
scale_params: 569.398803711
scale_params: 524.88079834
scale_params: 75.9232025146
scale_params: 407.049591064
scale_params: 347.98348999
scale_params: 930.729553223
scale_params: 297.369384766
scale_params: 6495.53271484
scale_params: 1748.1229248
scale_params: 3387.83105469
scale_params: 204.078979492
scale_params: 142.479232788
scale_params: 664.10748291
}
}
layer {
name: "bn2a_branch1"
type: "BatchNorm"
bottom: "res2a_branch1"
top: "res2a_branch1"
batch_norm_param {
}
}
layer {
name: "scale2a_branch1"
type: "Scale"
bottom: "res2a_branch1"
top: "res2a_branch1"
scale_param {
bias_term: true
}
}
layer {
name: "res2a_branch2a"
type: "Convolution"
bottom: "pool1"
top: "res2a_branch2a"
convolution_param {
num_output: 64
bias_term: false
pad: 0
kernel_size: 1
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0.20000000298
}
}
quantization_param {
bw_layer_in: 8
bw_layer_out: 8
bw_params: 8
scale_in: 8.12105941772
scale_out: 21.6193752289
scale_params: 695.08380127
scale_params: 411.19354248
scale_params: 419.415222168
scale_params: 593.304077148
scale_params: 733.221862793
scale_params: 373.226959229
scale_params: 391.625335693
scale_params: 822.532409668
scale_params: 389.699951172
scale_params: 649.979858398
scale_params: 589.477416992
scale_params: 400.722564697
scale_params: 907.156555176
scale_params: 321.363372803
scale_params: 1182.46765137
scale_params: 566.082458496
scale_params: 449.189086914
scale_params: 451.429992676
scale_params: 299.026733398
scale_params: 415.015075684
scale_params: 535.050170898
scale_params: 339.691558838
scale_params: 301.874206543
scale_params: 480.931060791
scale_params: 373.453491211
scale_params: 218.039138794
scale_params: 365.197479248
scale_params: 296.92175293
scale_params: 345.503417969
scale_params: 210.931610107
scale_params: 482.314788818
scale_params: 537.323791504
scale_params: 266.434417725
scale_params: 553.1875
scale_params: 329.805084229
scale_params: 566.149902344
scale_params: 338.927276611
scale_params: 556.954162598
scale_params: 377.313812256
scale_params: 224.813430786
scale_params: 166.910369873
scale_params: 206.882034302
scale_params: 1631.98120117
scale_params: 413.789154053
scale_params: 298.516357422
scale_params: 157.396240234
scale_params: 374.282897949
scale_params: 779.105895996
scale_params: 510.992340088
scale_params: 209.781188965
scale_params: 535.766418457
scale_params: 306.58203125
scale_params: 216.153198242
scale_params: 151.085357666
scale_params: 508.375274658
scale_params: 330.496368408
scale_params: 601.443969727
scale_params: 596.136962891
scale_params: 153.200256348
scale_params: 355.340759277
scale_params: 530.736450195
scale_params: 390.325317383
scale_params: 296.371917725
scale_params: 434.643035889
}
}
layer {
name: "bn2a_branch2a"
type: "BatchNorm"
bottom: "res2a_branch2a"
top: "res2a_branch2a"
batch_norm_param {
}
}
layer {
name: "scale2a_branch2a"
type: "Scale"
bottom: "res2a_branch2a"
top: "res2a_branch2a"
scale_param {
bias_term: true
}
}
layer {
name: "res2a_branch2a_relu"
type: "ReLU"
bottom: "res2a_branch2a"
top: "res2a_branch2a"
relu_param {
}
}
layer {
name: "res2a_branch2b"
type: "Convolution"
bottom: "res2a_branch2a"
top: "res2a_branch2b"
convolution_param {
num_output: 64
bias_term: false
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0.20000000298
}
}
quantization_param {
bw_layer_in: 8
bw_layer_out: 8
bw_params: 8
scale_in: 21.6193752289
scale_out: 20.5058460236
scale_params: 1121.42102051
scale_params: 708.39831543
scale_params: 962.02355957
scale_params: 1040.93664551
scale_params: 773.380249023
scale_params: 550.097229004
scale_params: 937.491577148
scale_params: 1531.5480957
scale_params: 1760.12158203
scale_params: 1492.20581055
scale_params: 672.248596191
scale_params: 1122.7253418
scale_params: 510.655029297
scale_params: 1296.98486328
scale_params: 547.13684082
scale_params: 1375.06481934
scale_params: 1437.6217041
scale_params: 1250.99047852
scale_params: 424.72958374
scale_params: 1660.33435059
scale_params: 774.949768066
scale_params: 832.550048828
scale_params: 711.316650391
scale_params: 939.714111328
scale_params: 625.579467773
scale_params: 416.323669434
scale_params: 819.169006348
scale_params: 1239.57714844
scale_params: 1370.93115234
scale_params: 2002.09863281
scale_params: 778.046386719
scale_params: 488.19329834
scale_params: 970.938781738
scale_params: 1279.6862793
scale_params: 649.571960449
scale_params: 977.585449219
scale_params: 1374.99597168
scale_params: 701.126281738
scale_params: 1191.04187012
scale_params: 823.521728516
scale_params: 650.578857422
scale_params: 2296.07519531
scale_params: 750.90045166
scale_params: 1421.80236816
scale_params: 873.258300781
scale_params: 780.386169434
scale_params: 1610.42590332
scale_params: 1084.50268555
scale_params: 880.402893066
scale_params: 532.145141602
scale_params: 1174.31274414
scale_params: 783.270202637
scale_params: 1476.43249512
scale_params: 1455.10827637
scale_params: 945.98840332
scale_params: 650.548156738
scale_params: 2028.63684082
scale_params: 1127.40795898
scale_params: 661.008789062
scale_params: 738.785522461
scale_params: 885.248596191
scale_params: 686.310668945
scale_params: 1795.27514648
scale_params: 815.777587891
}
}
layer {
name: "bn2a_branch2b"
type: "BatchNorm"
bottom: "res2a_branch2b"
top: "res2a_branch2b"
batch_norm_param {
}
}
layer {
name: "scale2a_branch2b"
type: "Scale"
bottom: "res2a_branch2b"
top: "res2a_branch2b"
scale_param {
bias_term: true
}
}
layer {
name: "res2a_branch2b_relu"
type: "ReLU"
bottom: "res2a_branch2b"
top: "res2a_branch2b"
relu_param {
}
}
layer {
name: "res2a_branch2c"
type: "Convolution"
bottom: "res2a_branch2b"
top: "res2a_branch2c"
convolution_param {
num_output: 256
bias_term: false
pad: 0
kernel_size: 1
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0.20000000298
}
}
quantization_param {
bw_layer_in: 8
bw_layer_out: 8
bw_params: 8
scale_in: 20.5058460236
scale_out: 15.5226793289
scale_params: 621.090393066
scale_params: 612.051269531
scale_params: 291.46697998
scale_params: 526.05670166
scale_params: 602.146606445
scale_params: 548.633239746
scale_params: 326.257751465
scale_params: 248.807693481
scale_params: 493.045898438
scale_params: 937.873962402
scale_params: 398.665618896
scale_params: 1214.15136719
scale_params: 366.340270996
scale_params: 257.499206543
scale_params: 771.894897461
scale_params: 339.219482422
scale_params: 613.40625
scale_params: 217.525695801
scale_params: 197.408935547
scale_params: 425.230133057
scale_params: 311.971160889
scale_params: 568.343933105
scale_params: 421.345855713
scale_params: 196.585983276
scale_params: 444.844055176
scale_params: 389.483764648
scale_params: 232.955978394
scale_params: 982.776733398
scale_params: 261.319244385
scale_params: 773.660217285
scale_params: 237.973846436
scale_params: 2193.35131836
scale_params: 271.213806152
scale_params: 1386.41931152
scale_params: 1790.66943359
scale_params: 136.525375366
scale_params: 1592.18310547
scale_params: 500.280334473
scale_params: 349.865783691
scale_params: 759.824890137
scale_params: 519.655761719
scale_params: 195.854995728
scale_params: 601.106140137
scale_params: 630.411071777
scale_params: 473.52230835
scale_params: 131.739776611
scale_params: 382.171875
scale_params: 6148.69775391
scale_params: 167.069213867
scale_params: 247.891082764
scale_params: 1551.43457031
scale_params: 164.557693481
scale_params: 404.029144287
scale_params: 244.135910034
scale_params: 269.903839111
scale_params: 191.116821289
scale_params: 130.417526245
scale_params: 354.560058594
scale_params: 297.049163818
scale_params: 336.812561035
scale_params: 334.866851807
scale_params: 226.631408691
scale_params: 171.004486084
scale_params: 2042.4185791
scale_params: 558.257568359
scale_params: 180.317565918
scale_params: 181.795806885
scale_params: 283.473388672
scale_params: 808.496643066
scale_params: 742.463012695
scale_params: 400.181060791
scale_params: 810.904418945
scale_params: 1557.78869629
scale_params: 285.445831299
scale_params: 644.141540527
scale_params: 968.871643066
scale_params: 443.2578125
scale_params: 897.236572266
scale_params: 465.059417725
scale_params: 791.670654297
scale_params: 507.785217285
scale_params: 1005.61181641
scale_params: 235.566146851
scale_params: 267.268859863
scale_params: 247.560333252
scale_params: 987.586120605
scale_params: 563.932312012
scale_params: 382.896331787
scale_params: 123.963790894
scale_params: 879.996643066
scale_params: 1379.16357422
scale_params: 152.520095825
scale_params: 337.446624756
scale_params: 350.431549072
scale_params: 540.73248291
scale_params: 309.518157959
scale_params: 476.305084229
scale_params: 673.05255127
scale_params: 386.694946289
scale_params: 5247.37695312
scale_params: 169.708694458
scale_params: 1722.46826172
scale_params: 1184.18774414
scale_params: 245.766204834
scale_params: 829.660217285
scale_params: 156.56918335
scale_params: 282.364593506
scale_params: 151.227386475
scale_params: 492.758148193
scale_params: 788.917663574
scale_params: 392.911010742
scale_params: 454.713378906
scale_params: 586.483093262
scale_params: 772.264343262
scale_params: 271.788818359
scale_params: 573.316711426
scale_params: 550.65411377
scale_params: 731.298522949
scale_params: 633.938659668
scale_params: 474.619689941
scale_params: 270.204650879
scale_params: 263.278442383
scale_params: 1527.12414551
scale_params: 398.016967773
scale_params: 537.20098877
scale_params: 375.20211792
scale_params: 250.597702026
scale_params: 351.658935547
scale_params: 613.259765625
scale_params: 134.732925415
scale_params: 838.722473145
scale_params: 394.880279541
scale_params: 330.914489746
scale_params: 2011.99353027
scale_params: 1018.08782959
scale_params: 1681.28674316
scale_params: 272.017913818
scale_params: 278.02142334
scale_params: 475.616119385
scale_params: 790.315979004
scale_params: 253.716445923
scale_params: 661.830078125
scale_params: 569.590942383
scale_params: 347.275848389
scale_params: 271.195373535
scale_params: 404.130126953
scale_params: 407.824981689
scale_params: 971.409667969
scale_params: 318.721740723
scale_params: 685.844055176
scale_params: 486.800628662
scale_params: 585.727355957
scale_params: 452.894683838
scale_params: 1017.39471436
scale_params: 225.926177979
scale_params: 364.037200928
scale_params: 1110.46691895
scale_params: 917.610473633
scale_params: 292.808654785
scale_params: 306.593688965
scale_params: 230.455871582
scale_params: 279.88458252
scale_params: 323.107513428
scale_params: 438.104797363
scale_params: 193.24647522
scale_params: 237.107040405
scale_params: 517.036132812
scale_params: 385.173858643
scale_params: 203.719390869
scale_params: 235.506607056
scale_params: 253.467086792
scale_params: 176.221710205
scale_params: 406.019836426
scale_params: 4184.95458984
scale_params: 6892.43847656
scale_params: 580.018127441
scale_params: 306.796600342
scale_params: 728.452331543
scale_params: 384.817199707
scale_params: 129.378707886
scale_params: 595.522338867
scale_params: 350.931335449
scale_params: 598.399780273
scale_params: 1809.15148926
scale_params: 647.939208984
scale_params: 241.562454224
scale_params: 325.12097168
scale_params: 301.21887207
scale_params: 331.800628662
scale_params: 273.849487305
scale_params: 1696.07336426
scale_params: 198.569595337
scale_params: 702.32244873
scale_params: 242.835998535
scale_params: 1068.01306152
scale_params: 458.476837158
scale_params: 349.436950684
scale_params: 144.760162354
scale_params: 436.306091309
scale_params: 437.578948975
scale_params: 417.562927246
scale_params: 150.443023682
scale_params: 760.946533203
scale_params: 809.730712891
scale_params: 61196.203125
scale_params: 304.91583252
scale_params: 263.790252686
scale_params: 414.39654541
scale_params: 521.952209473
scale_params: 390.98236084
scale_params: 8452.56152344
scale_params: 686.904418945
scale_params: 72.1709442139
scale_params: 926.275695801
scale_params: 231.477050781
scale_params: 564.800292969
scale_params: 4142.37304688
scale_params: 353.029693604
scale_params: 557.154968262
scale_params: 312.292358398
scale_params: 272.156982422
scale_params: 355.890960693
scale_params: 343.815338135
scale_params: 1638.13439941
scale_params: 478.391906738
scale_params: 2612.11010742
scale_params: 198.523254395
scale_params: 447.50604248
scale_params: 276.998596191
scale_params: 312.614135742
scale_params: 692.745178223
scale_params: 160.575668335
scale_params: 1311.4967041
scale_params: 760.422058105
scale_params: 133.688583374
scale_params: 166.384246826
scale_params: 213.436187744
scale_params: 293.098419189
scale_params: 171.74029541
scale_params: 482.59664917
scale_params: 281.916046143
scale_params: 855.382568359
scale_params: 241.103042603
scale_params: 989.067443848
scale_params: 498.119567871
scale_params: 369.982513428
scale_params: 313.772766113
scale_params: 448.125183105
scale_params: 248.348648071
scale_params: 627.346374512
scale_params: 4630.62402344
scale_params: 1036.15893555
scale_params: 2267.00512695
scale_params: 521.396484375
scale_params: 811.815979004
scale_params: 1704.74377441
}
}
layer {
name: "bn2a_branch2c"
type: "BatchNorm"
bottom: "res2a_branch2c"
top: "res2a_branch2c"
batch_norm_param {
}
}
layer {
name: "scale2a_branch2c"
type: "Scale"
bottom: "res2a_branch2c"
top: "res2a_branch2c"
scale_param {
bias_term: true
}
}
layer {
name: "res2a"
type: "Eltwise"
bottom: "res2a_branch1"
bottom: "res2a_branch2c"
top: "res2a"
eltwise_param {
}
}
layer {
name: "res2a_relu"
type: "ReLU"
bottom: "res2a"
top: "res2a"
relu_param {
}
}
layer {
name: "res2b_branch2a"
type: "Convolution"
bottom: "res2a"
top: "res2b_branch2a"
convolution_param {