diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..51ef049 --- /dev/null +++ b/.gitignore @@ -0,0 +1,79 @@ +.ipynb_checkpoints +lib/build +lib/pycocotools/_mask.c +lib/pycocotools/_mask.so +caffe +output + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# IPython Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# dotenv +.env + +# virtualenv +venv/ +ENV/ + +# Spyder project settings +.spyderproject + +# Rope project settings +.ropeproject diff --git a/ImageSets/Main/test.txt b/ImageSets/Main/test.txt new file mode 100644 index 0000000..30177c1 --- /dev/null +++ b/ImageSets/Main/test.txt @@ -0,0 +1,4952 @@ +000001 +000002 +000003 +000004 +000006 +000008 +000010 +000011 +000013 +000014 +000015 +000018 +000022 +000025 +000027 +000028 +000029 +000031 +000037 +000038 +000040 +000043 +000045 +000049 +000053 +000054 +000055 +000056 +000057 +000058 +000059 +000062 +000067 +000068 +000069 +000070 +000071 +000074 +000075 +000076 +000079 +000080 +000082 +000084 +000085 +000086 +000087 +000088 +000090 +000092 +000094 +000096 +000097 +000098 +000100 +000103 +000105 +000106 +000108 +000111 +000114 +000115 +000116 +000119 +000124 +000126 +000127 +000128 +000135 +000136 +000137 +000139 +000144 +000145 +000148 +000149 +000151 +000152 +000155 +000157 +000160 +000166 +000167 +000168 +000172 +000175 +000176 +000178 +000179 +000181 +000182 +000183 +000185 +000186 +000188 +000191 +000195 +000196 +000197 +000199 +000201 +000202 +000204 +000205 +000206 +000212 +000213 +000216 +000217 +000223 +000226 +000227 +000230 +000231 +000234 +000237 +000238 +000239 +000240 +000243 +000247 +000248 +000252 +000253 +000254 +000255 +000258 +000260 +000261 +000264 +000265 +000267 +000271 +000272 +000273 +000274 +000277 +000279 +000280 +000281 +000283 +000284 +000286 +000287 +000290 +000291 +000292 +000293 +000295 +000297 +000299 +000300 +000301 +000309 +000310 +000313 +000314 +000315 +000316 +000319 +000324 +000326 +000327 +000330 +000333 +000335 +000339 +000341 +000342 +000345 +000346 +000348 +000350 +000351 +000353 +000356 +000357 +000358 +000360 +000361 +000362 +000364 +000365 +000366 +000368 +000369 +000371 +000375 +000376 +000377 +000378 +000383 +000384 +000385 +000386 +000388 +000389 +000390 +000392 +000393 +000397 +000398 +000399 +000401 +000402 +000405 +000409 +000410 +000412 +000413 +000414 +000415 +000418 +000421 +000422 +000423 +000425 +000426 +000429 +000432 +000434 +000436 +000437 +000440 +000441 +000442 +000444 +000445 +000447 +000449 +000451 +000452 +000453 +000455 +000456 +000457 +000458 +000465 +000466 +000467 +000471 +000472 +000473 +000475 +000478 +000479 +000481 +000485 +000487 +000488 +000490 +000493 +000495 +000497 +000502 +000504 +000505 +000506 +000507 +000510 +000511 +000512 +000517 +000521 +000527 +000529 +000532 +000533 +000534 +000536 +000538 +000539 +000542 +000546 
+000547 +000548 +000551 +000553 +000556 +000557 +000558 +000560 +000561 +000562 +000566 +000567 +000568 +000569 +000570 +000571 +000572 +000573 +000574 +000575 +000576 +000578 +000580 +000584 +000585 +000586 +000587 +000593 +000594 +000595 +000596 +000600 +000602 +000603 +000604 +000606 +000607 +000611 +000614 +000615 +000616 +000617 +000618 +000621 +000623 +000624 +000627 +000629 +000630 +000631 +000634 +000636 +000638 +000639 +000640 +000641 +000642 +000643 +000644 +000646 +000649 +000650 +000651 +000652 +000655 +000658 +000659 +000662 +000664 +000665 +000666 +000668 +000669 +000670 +000673 +000674 +000678 +000679 +000681 +000683 +000687 +000691 +000692 +000693 +000696 +000697 +000698 +000701 +000703 +000704 +000706 +000708 +000715 +000716 +000718 +000719 +000721 +000722 +000723 +000724 +000725 +000727 +000732 +000734 +000735 +000736 +000737 +000741 +000743 +000744 +000745 +000747 +000749 +000751 +000757 +000758 +000759 +000762 +000765 +000766 +000769 +000773 +000775 +000778 +000779 +000781 +000783 +000784 +000785 +000788 +000789 +000790 +000792 +000795 +000798 +000801 +000803 +000807 +000809 +000811 +000813 +000817 +000819 +000821 +000824 +000825 +000833 +000835 +000836 +000837 +000838 +000839 +000840 +000841 +000844 +000846 +000852 +000853 +000856 +000858 +000861 +000864 +000866 +000869 +000870 +000873 +000875 +000877 +000881 +000883 +000884 +000886 +000890 +000891 +000893 +000894 +000897 +000901 +000905 +000907 +000909 +000910 +000913 +000914 +000916 +000922 +000924 +000925 +000927 +000928 +000930 +000932 +000933 +000938 +000939 +000940 +000941 +000942 +000944 +000945 +000952 +000953 +000955 +000956 +000957 +000959 +000960 +000961 +000963 +000968 +000969 +000970 +000974 +000975 +000976 +000978 +000979 +000981 +000983 +000984 +000985 +000986 +000988 +000990 +000992 +000994 +000995 +000998 +001000 +001003 +001005 +001006 +001007 +001013 +001016 +001019 +001020 +001021 +001022 +001023 +001025 +001026 +001029 +001030 +001031 +001032 +001033 +001034 +001035 +001037 +001038 +001039 +001040 +001044 +001046 +001047 +001048 +001049 +001051 +001054 +001055 +001058 +001059 +001063 +001065 +001067 +001070 +001075 +001076 +001080 +001081 +001085 +001086 +001087 +001088 +001089 +001090 +001094 +001095 +001096 +001098 +001099 +001100 +001103 +001105 +001108 +001111 +001114 +001115 +001116 +001117 +001118 +001120 +001122 +001123 +001126 +001128 +001131 +001132 +001133 +001134 +001135 +001138 +001139 +001141 +001146 +001150 +001153 +001155 +001157 +001159 +001162 +001163 +001165 +001167 +001169 +001173 +001177 +001178 +001179 +001180 +001181 +001183 +001188 +001189 +001190 +001193 +001195 +001196 +001197 +001198 +001202 +001208 +001210 +001213 +001216 +001217 +001218 +001219 +001220 +001222 +001223 +001227 +001228 +001232 +001235 +001238 +001242 +001243 +001244 +001245 +001246 +001249 +001251 +001252 +001253 +001255 +001256 +001257 +001261 +001262 +001264 +001267 +001271 +001275 +001276 +001278 +001280 +001282 +001283 +001285 +001291 +001295 +001296 +001297 +001300 +001301 +001302 +001303 +001305 +001306 +001307 +001308 +001313 +001317 +001318 +001319 +001320 +001321 +001322 +001328 +001329 +001331 +001335 +001336 +001338 +001339 +001340 +001342 +001344 +001347 +001349 +001351 +001353 +001354 +001355 +001356 +001357 +001358 +001359 +001363 +001366 +001367 +001368 +001369 +001370 +001372 +001373 +001374 +001376 +001377 +001379 +001380 +001381 +001382 +001389 +001391 +001392 +001394 +001396 +001398 +001399 +001401 +001403 +001407 +001410 +001411 +001412 +001415 +001416 +001417 +001419 +001422 +001423 
+001424 +001425 +001428 +001429 +001431 +001433 +001435 +001437 +001438 +001440 +001446 +001447 +001448 +001449 +001452 +001454 +001456 +001458 +001459 +001461 +001462 +001469 +001471 +001473 +001474 +001476 +001477 +001478 +001482 +001487 +001489 +001491 +001495 +001496 +001500 +001502 +001503 +001505 +001506 +001507 +001508 +001511 +001513 +001516 +001518 +001519 +001520 +001525 +001527 +001530 +001533 +001534 +001535 +001538 +001540 +001542 +001546 +001547 +001549 +001550 +001551 +001552 +001558 +001560 +001562 +001564 +001566 +001567 +001568 +001569 +001570 +001572 +001573 +001574 +001575 +001578 +001581 +001583 +001584 +001585 +001587 +001589 +001591 +001592 +001596 +001599 +001600 +001601 +001602 +001605 +001606 +001609 +001613 +001615 +001616 +001619 +001620 +001621 +001623 +001624 +001625 +001626 +001629 +001631 +001634 +001635 +001637 +001639 +001641 +001644 +001645 +001646 +001648 +001652 +001655 +001656 +001657 +001658 +001659 +001660 +001663 +001664 +001665 +001666 +001667 +001668 +001670 +001671 +001672 +001674 +001679 +001681 +001687 +001692 +001694 +001695 +001696 +001697 +001698 +001700 +001701 +001702 +001703 +001704 +001705 +001706 +001709 +001710 +001712 +001715 +001716 +001719 +001720 +001722 +001728 +001731 +001735 +001736 +001737 +001740 +001742 +001743 +001744 +001745 +001748 +001751 +001753 +001757 +001760 +001762 +001763 +001764 +001767 +001769 +001770 +001773 +001774 +001776 +001779 +001781 +001783 +001786 +001788 +001790 +001791 +001792 +001794 +001796 +001798 +001802 +001803 +001804 +001805 +001808 +001811 +001812 +001813 +001814 +001815 +001817 +001819 +001820 +001822 +001823 +001824 +001826 +001829 +001831 +001835 +001838 +001839 +001844 +001846 +001848 +001850 +001851 +001852 +001856 +001857 +001859 +001863 +001865 +001866 +001867 +001868 +001869 +001871 +001873 +001874 +001876 +001879 +001880 +001883 +001884 +001885 +001886 +001889 +001890 +001891 +001893 +001895 +001897 +001900 +001905 +001908 +001909 +001910 +001912 +001913 +001914 +001916 +001917 +001919 +001921 +001923 +001924 +001925 +001926 +001929 +001935 +001939 +001942 +001943 +001946 +001947 +001949 +001951 +001953 +001955 +001956 +001957 +001959 +001961 +001965 +001966 +001967 +001968 +001969 +001973 +001974 +001975 +001979 +001983 +001984 +001986 +001987 +001988 +001990 +001991 +001992 +001993 +001994 +001996 +001997 +001998 +002003 +002005 +002007 +002008 +002009 +002010 +002013 +002014 +002016 +002017 +002018 +002026 +002028 +002029 +002031 +002032 +002033 +002035 +002038 +002040 +002041 +002044 +002046 +002048 +002050 +002052 +002053 +002057 +002059 +002060 +002062 +002065 +002066 +002071 +002072 +002073 +002074 +002075 +002076 +002077 +002078 +002079 +002080 +002081 +002084 +002085 +002087 +002089 +002092 +002093 +002097 +002100 +002103 +002105 +002106 +002107 +002110 +002111 +002113 +002115 +002118 +002119 +002121 +002122 +002123 +002127 +002128 +002130 +002131 +002133 +002137 +002138 +002141 +002143 +002144 +002147 +002148 +002149 +002150 +002154 +002157 +002159 +002160 +002161 +002162 +002164 +002167 +002168 +002173 +002175 +002177 +002185 +002188 +002189 +002195 +002198 +002200 +002203 +002204 +002205 +002206 +002207 +002210 +002211 +002216 +002217 +002222 +002223 +002225 +002227 +002229 +002230 +002231 +002232 +002235 +002236 +002239 +002240 +002242 +002243 +002245 +002246 +002250 +002252 +002254 +002258 +002262 +002264 +002269 +002271 +002274 +002275 +002282 +002283 +002286 +002289 +002292 +002294 +002295 +002296 +002297 +002298 +002299 +002301 +002303 +002304 +002309 +002312 +002313 
+002314 +002316 +002317 +002319 +002322 +002325 +002326 +002327 +002331 +002336 +002338 +002339 +002341 +002344 +002346 +002349 +002351 +002353 +002356 +002357 +002358 +002360 +002363 +002365 +002370 +002379 +002380 +002381 +002383 +002386 +002388 +002389 +002390 +002394 +002395 +002396 +002397 +002398 +002399 +002400 +002402 +002406 +002408 +002409 +002412 +002414 +002416 +002418 +002421 +002422 +002424 +002426 +002428 +002429 +002430 +002431 +002432 +002434 +002438 +002440 +002446 +002447 +002449 +002451 +002453 +002455 +002457 +002463 +002464 +002467 +002469 +002473 +002474 +002475 +002482 +002484 +002485 +002486 +002487 +002488 +002489 +002495 +002498 +002499 +002503 +002506 +002507 +002509 +002510 +002511 +002515 +002516 +002517 +002521 +002522 +002526 +002527 +002528 +002530 +002531 +002532 +002535 +002536 +002538 +002541 +002543 +002548 +002550 +002551 +002552 +002553 +002556 +002557 +002560 +002562 +002568 +002570 +002573 +002574 +002575 +002576 +002577 +002580 +002581 +002582 +002583 +002587 +002588 +002591 +002592 +002596 +002597 +002601 +002602 +002604 +002607 +002608 +002610 +002612 +002614 +002616 +002617 +002619 +002620 +002622 +002623 +002624 +002626 +002628 +002629 +002630 +002631 +002638 +002639 +002640 +002642 +002644 +002650 +002651 +002652 +002654 +002655 +002656 +002660 +002661 +002663 +002665 +002671 +002672 +002673 +002674 +002676 +002679 +002681 +002685 +002686 +002687 +002688 +002692 +002694 +002698 +002700 +002701 +002703 +002705 +002707 +002708 +002711 +002712 +002716 +002719 +002720 +002724 +002725 +002726 +002728 +002729 +002731 +002733 +002736 +002739 +002740 +002742 +002743 +002746 +002748 +002750 +002752 +002753 +002754 +002756 +002758 +002761 +002764 +002768 +002769 +002770 +002771 +002773 +002777 +002780 +002781 +002787 +002788 +002789 +002790 +002792 +002793 +002797 +002799 +002802 +002805 +002806 +002808 +002809 +002811 +002813 +002814 +002818 +002819 +002821 +002822 +002823 +002824 +002825 +002828 +002829 +002830 +002831 +002832 +002837 +002839 +002840 +002843 +002846 +002849 +002850 +002851 +002852 +002853 +002856 +002857 +002860 +002861 +002862 +002863 +002865 +002871 +002872 +002874 +002876 +002877 +002878 +002882 +002883 +002885 +002887 +002888 +002890 +002892 +002894 +002895 +002897 +002898 +002900 +002902 +002903 +002904 +002905 +002907 +002908 +002909 +002911 +002918 +002920 +002921 +002922 +002923 +002925 +002926 +002927 +002928 +002929 +002930 +002936 +002945 +002948 +002949 +002950 +002951 +002955 +002959 +002961 +002964 +002968 +002970 +002971 +002972 +002973 +002974 +002979 +002980 +002981 +002982 +002983 +002985 +002991 +002993 +002996 +002997 +002998 +002999 +003001 +003006 +003010 +003012 +003014 +003016 +003018 +003019 +003020 +003022 +003025 +003026 +003029 +003030 +003033 +003035 +003036 +003037 +003040 +003041 +003043 +003046 +003048 +003049 +003050 +003052 +003055 +003059 +003060 +003062 +003067 +003068 +003069 +003070 +003071 +003073 +003075 +003076 +003079 +003080 +003081 +003084 +003087 +003091 +003095 +003096 +003097 +003099 +003101 +003104 +003109 +003111 +003113 +003114 +003115 +003119 +003123 +003125 +003128 +003130 +003131 +003132 +003136 +003139 +003141 +003143 +003144 +003148 +003151 +003152 +003153 +003156 +003158 +003160 +003166 +003167 +003168 +003171 +003172 +003173 +003174 +003179 +003180 +003182 +003187 +003190 +003191 +003192 +003193 +003196 +003197 +003198 +003201 +003203 +003206 +003208 +003209 +003212 +003215 +003217 +003220 +003221 +003222 +003224 +003225 +003226 +003227 +003230 +003232 +003234 +003235 +003237 
+003238 +003241 +003245 +003246 +003248 +003249 +003251 +003252 +003257 +003263 +003264 +003265 +003266 +003267 +003268 +003275 +003276 +003277 +003278 +003281 +003283 +003286 +003287 +003288 +003289 +003291 +003295 +003297 +003298 +003302 +003304 +003305 +003306 +003309 +003310 +003312 +003314 +003315 +003317 +003318 +003319 +003321 +003322 +003323 +003324 +003326 +003328 +003329 +003332 +003333 +003334 +003340 +003341 +003342 +003345 +003346 +003347 +003348 +003352 +003353 +003357 +003358 +003361 +003364 +003366 +003368 +003371 +003372 +003375 +003378 +003381 +003383 +003384 +003385 +003387 +003388 +003389 +003393 +003394 +003399 +003400 +003402 +003405 +003409 +003411 +003414 +003418 +003423 +003426 +003427 +003428 +003431 +003432 +003434 +003437 +003438 +003440 +003442 +003445 +003446 +003447 +003448 +003454 +003456 +003457 +003459 +003460 +003463 +003467 +003471 +003472 +003473 +003474 +003475 +003476 +003478 +003479 +003480 +003481 +003482 +003483 +003485 +003486 +003488 +003490 +003494 +003495 +003498 +003501 +003502 +003503 +003504 +003505 +003507 +003512 +003513 +003514 +003515 +003517 +003520 +003523 +003526 +003527 +003531 +003532 +003533 +003534 +003535 +003538 +003540 +003541 +003542 +003543 +003544 +003545 +003547 +003552 +003553 +003557 +003558 +003559 +003560 +003561 +003562 +003563 +003568 +003569 +003570 +003571 +003572 +003573 +003574 +003578 +003579 +003581 +003582 +003583 +003584 +003590 +003591 +003592 +003595 +003598 +003600 +003601 +003602 +003607 +003610 +003612 +003613 +003615 +003616 +003617 +003619 +003624 +003626 +003630 +003631 +003633 +003637 +003641 +003643 +003647 +003649 +003650 +003652 +003653 +003659 +003661 +003665 +003666 +003668 +003670 +003672 +003676 +003677 +003680 +003682 +003683 +003686 +003687 +003689 +003692 +003693 +003697 +003701 +003702 +003707 +003710 +003712 +003715 +003716 +003718 +003719 +003720 +003723 +003724 +003725 +003726 +003728 +003730 +003731 +003733 +003734 +003736 +003737 +003738 +003739 +003741 +003742 +003744 +003745 +003746 +003747 +003755 +003756 +003757 +003761 +003762 +003764 +003765 +003766 +003768 +003769 +003770 +003771 +003775 +003776 +003777 +003778 +003782 +003785 +003787 +003789 +003794 +003795 +003799 +003800 +003801 +003802 +003804 +003805 +003810 +003812 +003813 +003815 +003816 +003819 +003822 +003823 +003825 +003829 +003831 +003832 +003833 +003836 +003839 +003840 +003841 +003842 +003843 +003850 +003851 +003852 +003853 +003854 +003858 +003862 +003864 +003867 +003870 +003873 +003875 +003878 +003880 +003881 +003882 +003883 +003884 +003888 +003892 +003893 +003894 +003896 +003897 +003900 +003901 +003902 +003903 +003904 +003906 +003908 +003909 +003910 +003914 +003916 +003917 +003920 +003922 +003925 +003927 +003928 +003929 +003930 +003931 +003933 +003934 +003938 +003940 +003942 +003943 +003944 +003950 +003951 +003952 +003955 +003958 +003959 +003962 +003964 +003967 +003968 +003972 +003975 +003976 +003977 +003978 +003980 +003981 +003982 +003985 +003989 +003995 +003999 +004000 +004001 +004002 +004004 +004006 +004007 +004018 +004021 +004022 +004024 +004026 +004027 +004029 +004030 +004032 +004036 +004038 +004040 +004041 +004042 +004043 +004044 +004045 +004048 +004049 +004050 +004053 +004054 +004055 +004056 +004059 +004061 +004062 +004063 +004064 +004065 +004068 +004070 +004071 +004072 +004074 +004078 +004079 +004080 +004081 +004083 +004084 +004086 +004088 +004090 +004094 +004096 +004097 +004098 +004099 +004101 +004103 +004104 +004107 +004109 +004112 +004114 +004115 +004116 +004118 +004119 +004123 +004124 +004125 +004126 
+004127 +004128 +004130 +004132 +004134 +004139 +004144 +004147 +004151 +004153 +004154 +004155 +004156 +004157 +004159 +004160 +004161 +004162 +004165 +004166 +004167 +004172 +004173 +004175 +004176 +004177 +004179 +004180 +004181 +004182 +004183 +004184 +004187 +004188 +004197 +004198 +004199 +004202 +004206 +004207 +004208 +004210 +004211 +004213 +004214 +004216 +004217 +004218 +004219 +004222 +004225 +004226 +004227 +004233 +004234 +004235 +004236 +004238 +004240 +004243 +004245 +004248 +004249 +004250 +004251 +004252 +004254 +004260 +004261 +004262 +004266 +004267 +004268 +004276 +004277 +004278 +004282 +004285 +004288 +004289 +004290 +004294 +004297 +004299 +004301 +004302 +004305 +004306 +004308 +004309 +004311 +004313 +004314 +004316 +004317 +004319 +004320 +004324 +004328 +004330 +004332 +004334 +004335 +004336 +004337 +004340 +004342 +004343 +004344 +004348 +004350 +004353 +004355 +004357 +004358 +004362 +004363 +004366 +004373 +004374 +004375 +004377 +004378 +004381 +004382 +004383 +004385 +004388 +004393 +004394 +004395 +004398 +004399 +004400 +004401 +004402 +004403 +004406 +004407 +004408 +004410 +004412 +004413 +004414 +004415 +004416 +004417 +004418 +004419 +004420 +004422 +004425 +004426 +004427 +004428 +004431 +004435 +004440 +004442 +004443 +004444 +004445 +004447 +004448 +004449 +004451 +004453 +004454 +004456 +004458 +004460 +004461 +004462 +004465 +004467 +004469 +004472 +004473 +004475 +004476 +004477 +004478 +004480 +004482 +004483 +004485 +004486 +004489 +004491 +004492 +004497 +004501 +004503 +004504 +004505 +004506 +004511 +004513 +004515 +004516 +004521 +004522 +004523 +004525 +004529 +004531 +004533 +004534 +004536 +004538 +004541 +004543 +004545 +004546 +004547 +004550 +004554 +004556 +004557 +004559 +004560 +004561 +004564 +004567 +004568 +004569 +004572 +004573 +004575 +004577 +004578 +004580 +004582 +004583 +004586 +004589 +004590 +004593 +004594 +004596 +004598 +004599 +004602 +004603 +004608 +004610 +004613 +004614 +004615 +004616 +004617 +004619 +004620 +004621 +004624 +004629 +004633 +004635 +004637 +004638 +004639 +004640 +004641 +004642 +004645 +004646 +004650 +004657 +004658 +004659 +004661 +004663 +004664 +004665 +004666 +004667 +004668 +004669 +004670 +004677 +004678 +004680 +004681 +004684 +004688 +004690 +004695 +004696 +004697 +004698 +004700 +004703 +004704 +004709 +004711 +004712 +004713 +004716 +004717 +004720 +004721 +004724 +004725 +004726 +004728 +004729 +004730 +004731 +004733 +004734 +004736 +004738 +004739 +004740 +004741 +004744 +004745 +004749 +004751 +004752 +004755 +004756 +004757 +004758 +004759 +004762 +004763 +004764 +004765 +004766 +004767 +004769 +004771 +004772 +004774 +004775 +004778 +004780 +004781 +004784 +004787 +004791 +004795 +004798 +004800 +004802 +004803 +004804 +004806 +004807 +004809 +004810 +004811 +004813 +004817 +004819 +004820 +004821 +004822 +004824 +004827 +004829 +004833 +004835 +004838 +004843 +004844 +004845 +004847 +004851 +004853 +004854 +004855 +004858 +004860 +004861 +004862 +004864 +004865 +004870 +004871 +004874 +004875 +004877 +004880 +004881 +004883 +004884 +004887 +004888 +004889 +004891 +004892 +004893 +004894 +004899 +004900 +004901 +004904 +004906 +004908 +004909 +004914 +004915 +004917 +004918 +004919 +004920 +004921 +004922 +004923 +004924 +004925 +004927 +004930 +004932 +004933 +004934 +004937 +004940 +004941 +004942 +004944 +004945 +004947 +004949 +004952 +004957 +004959 +004964 +004965 +004969 +004970 +004971 +004975 +004978 +004979 +004980 +004981 +004988 +004989 +004993 +004996 +005000 
+005002 +005005 +005008 +005009 +005010 +005011 +005012 +005013 +005015 +005017 +005019 +005021 +005022 +005025 +005030 +005031 +005034 +005035 +005038 +005040 +005041 +005043 +005044 +005046 +005048 +005049 +005050 +005051 +005053 +005059 +005060 +005066 +005069 +005070 +005074 +005075 +005076 +005080 +005082 +005083 +005087 +005088 +005089 +005091 +005092 +005095 +005096 +005098 +005099 +005100 +005103 +005105 +005106 +005109 +005112 +005113 +005115 +005117 +005118 +005119 +005120 +005123 +005125 +005126 +005127 +005132 +005133 +005137 +005139 +005140 +005141 +005142 +005147 +005148 +005149 +005151 +005152 +005154 +005155 +005157 +005158 +005162 +005163 +005164 +005165 +005166 +005167 +005170 +005172 +005174 +005178 +005180 +005182 +005184 +005187 +005188 +005192 +005193 +005194 +005196 +005197 +005198 +005200 +005201 +005204 +005205 +005206 +005207 +005211 +005213 +005216 +005218 +005221 +005225 +005226 +005227 +005228 +005232 +005233 +005234 +005235 +005237 +005238 +005240 +005241 +005243 +005247 +005249 +005250 +005251 +005252 +005255 +005256 +005261 +005265 +005266 +005271 +005272 +005275 +005276 +005277 +005279 +005280 +005282 +005284 +005286 +005287 +005289 +005291 +005294 +005295 +005296 +005299 +005300 +005301 +005302 +005308 +005309 +005313 +005316 +005317 +005321 +005322 +005323 +005324 +005329 +005330 +005332 +005333 +005334 +005335 +005339 +005341 +005342 +005347 +005353 +005354 +005356 +005357 +005359 +005361 +005362 +005364 +005366 +005372 +005375 +005376 +005377 +005381 +005382 +005386 +005390 +005392 +005394 +005399 +005400 +005401 +005402 +005403 +005409 +005411 +005412 +005415 +005422 +005425 +005426 +005427 +005428 +005432 +005435 +005437 +005442 +005443 +005444 +005446 +005447 +005449 +005452 +005456 +005458 +005459 +005460 +005462 +005463 +005464 +005466 +005468 +005472 +005473 +005474 +005476 +005477 +005479 +005480 +005482 +005484 +005488 +005490 +005491 +005492 +005493 +005494 +005495 +005498 +005500 +005501 +005502 +005503 +005504 +005505 +005506 +005512 +005513 +005516 +005520 +005523 +005525 +005528 +005529 +005532 +005533 +005534 +005537 +005538 +005540 +005543 +005545 +005546 +005548 +005551 +005553 +005555 +005556 +005557 +005558 +005560 +005561 +005562 +005564 +005565 +005567 +005569 +005570 +005571 +005572 +005575 +005578 +005580 +005581 +005587 +005589 +005594 +005595 +005596 +005597 +005598 +005602 +005604 +005607 +005610 +005612 +005616 +005617 +005619 +005621 +005622 +005623 +005626 +005627 +005628 +005632 +005633 +005634 +005635 +005638 +005642 +005643 +005646 +005649 +005650 +005651 +005656 +005659 +005661 +005663 +005665 +005666 +005667 +005670 +005671 +005673 +005675 +005677 +005678 +005681 +005683 +005684 +005688 +005689 +005690 +005691 +005692 +005694 +005698 +005703 +005706 +005707 +005708 +005709 +005711 +005712 +005717 +005720 +005721 +005722 +005724 +005725 +005726 +005727 +005733 +005734 +005737 +005739 +005744 +005745 +005746 +005748 +005750 +005751 +005753 +005754 +005758 +005759 +005763 +005766 +005767 +005770 +005771 +005772 +005774 +005775 +005776 +005777 +005778 +005785 +005787 +005792 +005793 +005795 +005797 +005798 +005800 +005801 +005802 +005804 +005807 +005808 +005809 +005810 +005816 +005820 +005822 +005823 +005827 +005832 +005833 +005834 +005835 +005837 +005842 +005844 +005846 +005847 +005848 +005849 +005855 +005857 +005858 +005862 +005865 +005866 +005869 +005870 +005871 +005872 +005876 +005880 +005882 +005883 +005886 +005887 +005890 +005891 +005892 +005896 +005898 +005900 +005902 +005904 +005907 +005913 +005915 +005916 +005921 
+005922 +005924 +005925 +005926 +005927 +005929 +005931 +005932 +005933 +005934 +005935 +005936 +005937 +005939 +005941 +005942 +005943 +005944 +005945 +005946 +005949 +005950 +005953 +005955 +005957 +005958 +005959 +005962 +005965 +005966 +005967 +005969 +005972 +005973 +005974 +005976 +005977 +005978 +005982 +005986 +005987 +005993 +005994 +005997 +005999 +006002 +006003 +006006 +006007 +006008 +006010 +006013 +006014 +006015 +006016 +006017 +006019 +006021 +006022 +006024 +006031 +006032 +006034 +006036 +006037 +006039 +006040 +006044 +006047 +006048 +006049 +006050 +006051 +006052 +006053 +006054 +006056 +006057 +006059 +006060 +006063 +006064 +006068 +006072 +006075 +006076 +006077 +006080 +006081 +006082 +006083 +006085 +006086 +006087 +006090 +006092 +006093 +006094 +006099 +006101 +006102 +006106 +006109 +006110 +006112 +006113 +006114 +006115 +006116 +006118 +006119 +006121 +006122 +006126 +006127 +006132 +006137 +006138 +006142 +006143 +006144 +006145 +006147 +006149 +006152 +006154 +006155 +006157 +006160 +006164 +006165 +006167 +006168 +006169 +006173 +006178 +006182 +006186 +006191 +006192 +006193 +006194 +006195 +006197 +006199 +006200 +006204 +006205 +006207 +006211 +006213 +006217 +006226 +006227 +006228 +006231 +006232 +006237 +006239 +006242 +006244 +006245 +006246 +006248 +006253 +006255 +006256 +006257 +006263 +006265 +006266 +006268 +006271 +006273 +006274 +006278 +006280 +006283 +006287 +006288 +006292 +006293 +006294 +006297 +006298 +006302 +006303 +006307 +006308 +006310 +006311 +006312 +006313 +006315 +006316 +006317 +006322 +006324 +006326 +006327 +006328 +006331 +006332 +006333 +006334 +006336 +006340 +006342 +006343 +006345 +006347 +006354 +006356 +006358 +006359 +006360 +006361 +006364 +006365 +006368 +006370 +006372 +006373 +006376 +006378 +006379 +006380 +006383 +006384 +006386 +006388 +006389 +006390 +006393 +006394 +006397 +006399 +006401 +006402 +006403 +006405 +006406 +006407 +006408 +006410 +006412 +006413 +006414 +006415 +006416 +006420 +006422 +006423 +006426 +006431 +006432 +006435 +006439 +006441 +006446 +006451 +006452 +006453 +006454 +006457 +006460 +006461 +006464 +006467 +006469 +006471 +006477 +006478 +006479 +006481 +006485 +006487 +006489 +006490 +006491 +006493 +006494 +006496 +006498 +006500 +006502 +006504 +006505 +006508 +006510 +006511 +006513 +006514 +006516 +006517 +006518 +006521 +006522 +006525 +006526 +006527 +006528 +006531 +006533 +006535 +006537 +006539 +006540 +006541 +006544 +006545 +006546 +006552 +006554 +006555 +006557 +006558 +006559 +006561 +006563 +006566 +006567 +006568 +006571 +006573 +006574 +006577 +006579 +006580 +006581 +006582 +006586 +006589 +006590 +006591 +006592 +006594 +006596 +006598 +006600 +006601 +006604 +006607 +006608 +006613 +006614 +006615 +006616 +006620 +006623 +006624 +006629 +006630 +006633 +006634 +006639 +006640 +006641 +006642 +006644 +006646 +006649 +006650 +006651 +006653 +006655 +006656 +006659 +006662 +006663 +006665 +006669 +006672 +006675 +006676 +006680 +006683 +006685 +006686 +006688 +006691 +006692 +006693 +006700 +006701 +006705 +006710 +006711 +006712 +006713 +006715 +006716 +006717 +006720 +006721 +006723 +006724 +006728 +006729 +006732 +006733 +006737 +006741 +006742 +006743 +006744 +006745 +006746 +006749 +006750 +006752 +006754 +006756 +006757 +006758 +006763 +006764 +006767 +006770 +006771 +006774 +006775 +006776 +006778 +006779 +006780 +006785 +006787 +006788 +006790 +006791 +006792 +006793 +006795 +006796 +006798 +006801 +006804 +006807 +006809 +006811 +006812 +006815 +006816 
+006817 +006818 +006820 +006823 +006826 +006830 +006831 +006832 +006834 +006837 +006843 +006846 +006851 +006853 +006854 +006856 +006857 +006861 +006863 +006870 +006871 +006872 +006873 +006875 +006877 +006879 +006881 +006882 +006885 +006888 +006889 +006890 +006891 +006894 +006895 +006897 +006898 +006901 +006902 +006904 +006905 +006906 +006907 +006913 +006915 +006920 +006921 +006923 +006925 +006926 +006927 +006928 +006929 +006936 +006937 +006938 +006941 +006942 +006946 +006951 +006954 +006955 +006957 +006960 +006961 +006964 +006967 +006969 +006970 +006973 +006974 +006975 +006977 +006978 +006979 +006980 +006982 +006984 +006985 +006986 +006991 +006992 +006993 +006996 +006997 +006998 +006999 +007000 +007001 +007005 +007010 +007012 +007013 +007014 +007015 +007017 +007019 +007024 +007026 +007027 +007028 +007030 +007032 +007034 +007037 +007041 +007043 +007044 +007047 +007051 +007053 +007055 +007057 +007060 +007061 +007063 +007066 +007067 +007069 +007076 +007081 +007082 +007083 +007085 +007087 +007091 +007094 +007096 +007098 +007099 +007102 +007103 +007106 +007107 +007110 +007111 +007112 +007115 +007116 +007118 +007119 +007120 +007124 +007126 +007127 +007131 +007134 +007135 +007136 +007137 +007142 +007143 +007145 +007151 +007155 +007156 +007157 +007158 +007160 +007161 +007164 +007169 +007170 +007171 +007173 +007175 +007176 +007178 +007179 +007181 +007183 +007186 +007188 +007190 +007192 +007195 +007196 +007198 +007199 +007201 +007202 +007203 +007206 +007207 +007209 +007218 +007220 +007221 +007225 +007226 +007228 +007229 +007231 +007232 +007233 +007235 +007237 +007238 +007239 +007240 +007242 +007246 +007248 +007251 +007252 +007253 +007254 +007255 +007257 +007262 +007264 +007265 +007267 +007268 +007269 +007272 +007273 +007277 +007278 +007281 +007282 +007286 +007287 +007288 +007290 +007291 +007293 +007301 +007303 +007304 +007306 +007307 +007309 +007310 +007312 +007313 +007315 +007316 +007317 +007319 +007320 +007321 +007324 +007326 +007328 +007331 +007332 +007333 +007335 +007337 +007338 +007339 +007340 +007341 +007342 +007345 +007347 +007348 +007349 +007352 +007353 +007354 +007355 +007357 +007358 +007360 +007362 +007364 +007366 +007367 +007368 +007371 +007377 +007378 +007379 +007380 +007382 +007384 +007386 +007387 +007391 +007392 +007393 +007395 +007397 +007399 +007400 +007401 +007402 +007403 +007404 +007405 +007406 +007407 +007409 +007412 +007415 +007418 +007420 +007423 +007426 +007428 +007429 +007430 +007434 +007440 +007441 +007442 +007444 +007447 +007450 +007452 +007453 +007455 +007456 +007459 +007462 +007463 +007464 +007469 +007471 +007472 +007473 +007476 +007478 +007485 +007487 +007488 +007492 +007494 +007495 +007496 +007499 +007500 +007501 +007502 +007504 +007505 +007507 +007508 +007509 +007510 +007512 +007514 +007515 +007516 +007518 +007520 +007522 +007529 +007531 +007532 +007534 +007539 +007541 +007542 +007545 +007548 +007549 +007550 +007552 +007553 +007554 +007556 +007557 +007560 +007561 +007562 +007564 +007567 +007569 +007573 +007574 +007577 +007580 +007581 +007582 +007583 +007584 +007587 +007588 +007589 +007591 +007593 +007595 +007596 +007597 +007598 +007599 +007602 +007604 +007607 +007608 +007609 +007610 +007613 +007616 +007617 +007620 +007623 +007625 +007627 +007628 +007630 +007632 +007634 +007635 +007636 +007638 +007641 +007643 +007644 +007645 +007646 +007648 +007651 +007652 +007658 +007659 +007660 +007661 +007665 +007669 +007674 +007676 +007681 +007684 +007686 +007689 +007690 +007693 +007695 +007698 +007700 +007701 +007703 +007706 +007707 +007708 +007710 +007711 +007714 +007716 +007717 
+007719 +007722 +007725 +007726 +007728 +007730 +007733 +007734 +007737 +007738 +007739 +007741 +007744 +007747 +007750 +007752 +007755 +007756 +007757 +007759 +007761 +007764 +007766 +007769 +007770 +007771 +007774 +007778 +007780 +007782 +007783 +007784 +007785 +007787 +007788 +007789 +007792 +007794 +007796 +007797 +007800 +007801 +007802 +007804 +007805 +007806 +007807 +007808 +007811 +007816 +007817 +007818 +007822 +007823 +007825 +007827 +007828 +007829 +007830 +007832 +007835 +007837 +007839 +007842 +007844 +007846 +007848 +007849 +007850 +007851 +007852 +007858 +007860 +007861 +007862 +007866 +007867 +007870 +007871 +007874 +007875 +007879 +007880 +007881 +007882 +007887 +007888 +007891 +007892 +007893 +007894 +007895 +007896 +007903 +007904 +007906 +007907 +007912 +007913 +007917 +007918 +007922 +007927 +007929 +007930 +007934 +007936 +007937 +007938 +007941 +007942 +007944 +007945 +007948 +007949 +007951 +007952 +007955 +007957 +007960 +007961 +007962 +007965 +007966 +007967 +007969 +007972 +007973 +007975 +007977 +007978 +007981 +007982 +007983 +007985 +007986 +007988 +007989 +007990 +007992 +007993 +007994 +007995 +008000 +008003 +008006 +008007 +008010 +008011 +008013 +008014 +008015 +008016 +008018 +008020 +008021 +008022 +008025 +008027 +008028 +008030 +008034 +008035 +008038 +008039 +008041 +008045 +008046 +008047 +008050 +008052 +008054 +008055 +008056 +008058 +008059 +008065 +008066 +008070 +008071 +008073 +008074 +008077 +008078 +008080 +008081 +008088 +008089 +008090 +008092 +008094 +008097 +008099 +008102 +008104 +008109 +008110 +008111 +008113 +008114 +008118 +008119 +008120 +008123 +008124 +008126 +008128 +008129 +008131 +008133 +008134 +008135 +008136 +008143 +008145 +008146 +008147 +008148 +008149 +008152 +008153 +008154 +008155 +008156 +008157 +008158 +008161 +008162 +008165 +008167 +008170 +008172 +008176 +008178 +008179 +008181 +008182 +008183 +008184 +008185 +008187 +008192 +008193 +008194 +008195 +008196 +008198 +008201 +008205 +008206 +008207 +008210 +008212 +008214 +008215 +008217 +008219 +008221 +008227 +008228 +008230 +008231 +008233 +008234 +008237 +008238 +008239 +008240 +008242 +008243 +008245 +008246 +008247 +008249 +008255 +008256 +008257 +008259 +008264 +008265 +008266 +008267 +008270 +008271 +008273 +008274 +008276 +008277 +008278 +008283 +008286 +008287 +008288 +008289 +008290 +008291 +008298 +008303 +008304 +008305 +008308 +008309 +008314 +008321 +008324 +008325 +008328 +008330 +008331 +008333 +008334 +008337 +008339 +008340 +008343 +008344 +008347 +008348 +008350 +008352 +008353 +008354 +008356 +008357 +008358 +008361 +008362 +008363 +008366 +008367 +008369 +008371 +008373 +008375 +008377 +008378 +008379 +008380 +008382 +008383 +008389 +008392 +008393 +008394 +008395 +008396 +008399 +008400 +008401 +008402 +008404 +008405 +008406 +008407 +008408 +008411 +008412 +008414 +008417 +008418 +008419 +008420 +008421 +008428 +008431 +008432 +008435 +008436 +008439 +008440 +008441 +008446 +008447 +008448 +008451 +008455 +008457 +008458 +008459 +008460 +008463 +008464 +008469 +008471 +008473 +008474 +008476 +008479 +008480 +008481 +008486 +008487 +008488 +008489 +008490 +008491 +008493 +008496 +008497 +008500 +008501 +008504 +008505 +008507 +008508 +008510 +008511 +008515 +008516 +008520 +008525 +008527 +008528 +008531 +008532 +008537 +008538 +008539 +008540 +008543 +008544 +008545 +008546 +008547 +008548 +008551 +008552 +008554 +008555 +008560 +008561 +008563 +008565 +008566 +008567 +008569 +008570 +008571 +008574 +008575 +008577 +008578 +008579 +008580 
+008583 +008589 +008590 +008591 +008593 +008594 +008597 +008598 +008599 +008600 +008603 +008605 +008609 +008611 +008613 +008614 +008616 +008619 +008622 +008623 +008625 +008626 +008627 +008629 +008630 +008631 +008632 +008634 +008637 +008640 +008641 +008642 +008643 +008646 +008648 +008649 +008650 +008651 +008652 +008656 +008657 +008658 +008659 +008660 +008661 +008662 +008664 +008666 +008668 +008669 +008671 +008672 +008673 +008674 +008675 +008677 +008678 +008679 +008681 +008682 +008684 +008685 +008686 +008689 +008693 +008694 +008696 +008697 +008700 +008703 +008704 +008705 +008707 +008708 +008711 +008712 +008714 +008715 +008719 +008721 +008724 +008726 +008729 +008734 +008735 +008736 +008737 +008740 +008743 +008745 +008746 +008751 +008754 +008758 +008761 +008762 +008763 +008765 +008767 +008774 +008777 +008778 +008779 +008780 +008781 +008782 +008785 +008786 +008787 +008788 +008789 +008791 +008792 +008795 +008797 +008798 +008800 +008802 +008803 +008804 +008807 +008808 +008812 +008816 +008818 +008820 +008821 +008824 +008825 +008827 +008828 +008829 +008830 +008832 +008834 +008839 +008842 +008844 +008845 +008846 +008850 +008851 +008852 +008853 +008855 +008857 +008860 +008861 +008863 +008864 +008866 +008868 +008869 +008870 +008875 +008877 +008881 +008882 +008887 +008889 +008893 +008894 +008895 +008896 +008897 +008898 +008899 +008901 +008902 +008903 +008904 +008906 +008907 +008908 +008910 +008912 +008915 +008916 +008918 +008922 +008924 +008925 +008928 +008934 +008935 +008937 +008938 +008941 +008945 +008946 +008947 +008949 +008950 +008952 +008954 +008956 +008957 +008959 +008963 +008964 +008972 +008974 +008977 +008981 +008984 +008986 +008990 +008991 +008992 +008993 +008994 +008996 +008998 +009001 +009003 +009008 +009009 +009010 +009011 +009012 +009013 +009014 +009017 +009021 +009023 +009025 +009026 +009028 +009030 +009031 +009033 +009038 +009040 +009041 +009043 +009044 +009046 +009047 +009050 +009052 +009054 +009055 +009056 +009057 +009061 +009062 +009065 +009067 +009069 +009070 +009071 +009074 +009075 +009076 +009077 +009081 +009082 +009083 +009084 +009088 +009090 +009092 +009093 +009095 +009096 +009097 +009101 +009102 +009103 +009104 +009107 +009109 +009110 +009111 +009115 +009118 +009119 +009120 +009122 +009124 +009125 +009127 +009130 +009132 +009134 +009135 +009137 +009139 +009140 +009142 +009143 +009145 +009146 +009149 +009152 +009154 +009156 +009158 +009164 +009165 +009167 +009169 +009170 +009171 +009172 +009176 +009182 +009183 +009188 +009190 +009198 +009199 +009201 +009203 +009204 +009206 +009207 +009210 +009211 +009216 +009217 +009219 +009220 +009222 +009223 +009225 +009226 +009228 +009229 +009231 +009232 +009233 +009234 +009235 +009237 +009240 +009241 +009243 +009248 +009253 +009256 +009257 +009258 +009260 +009261 +009262 +009263 +009264 +009265 +009266 +009267 +009274 +009275 +009276 +009277 +009280 +009284 +009292 +009293 +009294 +009297 +009298 +009300 +009301 +009302 +009304 +009305 +009310 +009311 +009313 +009314 +009317 +009319 +009320 +009321 +009322 +009328 +009329 +009332 +009335 +009338 +009340 +009341 +009344 +009345 +009346 +009352 +009353 +009355 +009356 +009357 +009360 +009361 +009363 +009364 +009366 +009367 +009369 +009370 +009372 +009376 +009379 +009380 +009381 +009383 +009384 +009385 +009387 +009390 +009391 +009395 +009396 +009397 +009399 +009400 +009402 +009403 +009404 +009415 +009416 +009423 +009425 +009426 +009427 +009428 +009430 +009431 +009435 +009436 +009441 +009442 +009444 +009447 +009449 +009450 +009451 +009452 +009453 +009462 +009467 +009471 +009473 +009474 +009475 
+009478 +009482 +009483 +009485 +009486 +009487 +009489 +009492 +009493 +009495 +009498 +009501 +009503 +009505 +009506 +009509 +009510 +009511 +009513 +009514 +009521 +009522 +009525 +009529 +009530 +009534 +009535 +009536 +009538 +009539 +009544 +009547 +009548 +009552 +009553 +009554 +009555 +009556 +009559 +009561 +009563 +009564 +009569 +009570 +009572 +009574 +009575 +009578 +009581 +009582 +009583 +009589 +009590 +009592 +009593 +009594 +009595 +009599 +009601 +009602 +009604 +009606 +009607 +009608 +009610 +009612 +009616 +009622 +009624 +009625 +009626 +009628 +009630 +009631 +009632 +009633 +009635 +009639 +009640 +009642 +009643 +009645 +009646 +009648 +009651 +009652 +009653 +009657 +009660 +009661 +009662 +009663 +009665 +009669 +009672 +009673 +009674 +009675 +009677 +009680 +009682 +009683 +009688 +009689 +009690 +009694 +009696 +009697 +009701 +009704 +009705 +009708 +009714 +009715 +009716 +009720 +009722 +009723 +009725 +009727 +009728 +009730 +009731 +009736 +009739 +009740 +009741 +009742 +009744 +009750 +009751 +009752 +009753 +009757 +009759 +009760 +009765 +009766 +009768 +009769 +009770 +009771 +009775 +009777 +009779 +009782 +009783 +009784 +009786 +009787 +009788 +009791 +009793 +009795 +009798 +009799 +009802 +009803 +009804 +009806 +009811 +009812 +009814 +009815 +009817 +009818 +009820 +009821 +009824 +009826 +009827 +009829 +009835 +009837 +009838 +009840 +009843 +009844 +009846 +009847 +009849 +009850 +009853 +009854 +009856 +009857 +009861 +009864 +009866 +009871 +009873 +009875 +009876 +009883 +009885 +009888 +009889 +009890 +009891 +009892 +009893 +009895 +009899 +009901 +009903 +009906 +009907 +009909 +009910 +009912 +009914 +009915 +009916 +009919 +009921 +009922 +009924 +009925 +009927 +009928 +009929 +009930 +009931 +009933 +009934 +009936 +009937 +009941 +009943 +009948 +009951 +009952 +009953 +009956 +009957 +009960 +009962 +009963 diff --git a/ImageSets/Main/train.txt b/ImageSets/Main/train.txt new file mode 100644 index 0000000..4647eec --- /dev/null +++ b/ImageSets/Main/train.txt @@ -0,0 +1,8218 @@ +000012 +000017 +000023 +000026 +000032 +000033 +000034 +000035 +000036 +000042 +000044 +000047 +000048 +000061 +000064 +000066 +000073 +000077 +000078 +000083 +000089 +000091 +000104 +000112 +000122 +000129 +000133 +000134 +000138 +000140 +000141 +000147 +000153 +000154 +000159 +000161 +000162 +000163 +000164 +000171 +000173 +000174 +000187 +000189 +000192 +000193 +000194 +000198 +000200 +000207 +000209 +000219 +000220 +000222 +000225 +000228 +000235 +000242 +000250 +000256 +000259 +000262 +000263 +000276 +000278 +000282 +000288 +000294 +000296 +000306 +000307 +000311 +000312 +000317 +000320 +000325 +000331 +000334 +000337 +000344 +000347 +000349 +000355 +000359 +000367 +000370 +000372 +000379 +000382 +000387 +000391 +000394 +000395 +000400 +000404 +000406 +000407 +000411 +000416 +000430 +000431 +000438 +000446 +000450 +000454 +000463 +000468 +000469 +000470 +000474 +000476 +000477 +000484 +000489 +000496 +000503 +000508 +000516 +000518 +000519 +000522 +000524 +000525 +000526 +000528 +000535 +000537 +000541 +000544 +000549 +000550 +000552 +000554 +000555 +000559 +000565 +000577 +000583 +000589 +000590 +000592 +000597 +000605 +000609 +000612 +000620 +000622 +000625 +000632 +000633 +000635 +000648 +000654 +000657 +000671 +000672 +000680 +000685 +000688 +000689 +000695 +000699 +000700 +000709 +000710 +000711 +000726 +000729 +000731 +000733 +000739 +000740 +000753 +000754 +000761 +000764 +000767 +000768 +000770 +000774 +000793 +000796 +000804 +000805 
+000810 +000812 +000818 +000820 +000822 +000823 +000827 +000828 +000829 +000830 +000845 +000849 +000850 +000851 +000859 +000860 +000865 +000867 +000871 +000887 +000888 +000889 +000892 +000898 +000899 +000900 +000902 +000904 +000906 +000908 +000912 +000915 +000919 +000929 +000936 +000943 +000950 +000951 +000954 +000958 +000962 +000964 +000965 +000966 +000967 +000977 +000980 +000987 +000989 +000991 +000993 +000996 +000997 +000999 +001001 +001002 +001008 +001010 +001011 +001014 +001015 +001024 +001036 +001043 +001050 +001057 +001060 +001064 +001068 +001071 +001073 +001077 +001078 +001079 +001082 +001101 +001106 +001112 +001113 +001119 +001127 +001129 +001130 +001140 +001147 +001151 +001152 +001156 +001158 +001168 +001171 +001172 +001174 +001182 +001191 +001194 +001204 +001205 +001207 +001209 +001212 +001214 +001226 +001229 +001230 +001234 +001237 +001239 +001240 +001248 +001258 +001263 +001268 +001269 +001270 +001273 +001279 +001287 +001294 +001299 +001304 +001309 +001312 +001314 +001315 +001323 +001325 +001326 +001327 +001332 +001333 +001334 +001345 +001346 +001348 +001364 +001365 +001378 +001384 +001385 +001388 +001390 +001393 +001395 +001402 +001404 +001405 +001406 +001408 +001409 +001414 +001418 +001420 +001421 +001426 +001427 +001434 +001436 +001442 +001450 +001451 +001453 +001455 +001457 +001468 +001470 +001479 +001480 +001483 +001485 +001486 +001488 +001492 +001494 +001498 +001499 +001501 +001504 +001512 +001515 +001517 +001521 +001524 +001526 +001528 +001529 +001532 +001539 +001548 +001555 +001556 +001557 +001559 +001563 +001576 +001579 +001580 +001586 +001590 +001593 +001594 +001597 +001604 +001607 +001610 +001611 +001612 +001622 +001627 +001630 +001633 +001636 +001643 +001649 +001650 +001651 +001654 +001661 +001662 +001669 +001673 +001676 +001680 +001683 +001684 +001688 +001690 +001699 +001707 +001708 +001711 +001713 +001714 +001717 +001721 +001723 +001729 +001732 +001733 +001734 +001738 +001739 +001741 +001750 +001752 +001754 +001758 +001759 +001761 +001765 +001766 +001768 +001777 +001780 +001787 +001789 +001800 +001806 +001809 +001810 +001821 +001825 +001828 +001832 +001834 +001836 +001840 +001841 +001843 +001845 +001853 +001854 +001858 +001861 +001864 +001870 +001881 +001892 +001894 +001896 +001898 +001902 +001903 +001904 +001906 +001915 +001922 +001928 +001930 +001937 +001938 +001941 +001945 +001950 +001952 +001954 +001960 +001963 +001971 +001977 +001978 +001980 +001981 +001985 +001989 +001995 +001999 +002001 +002002 +002004 +002006 +002012 +002015 +002020 +002025 +002027 +002034 +002037 +002039 +002042 +002043 +002047 +002049 +002051 +002055 +002056 +002061 +002068 +002069 +002095 +002096 +002104 +002108 +002116 +002117 +002120 +002126 +002132 +002134 +002139 +002151 +002153 +002155 +002156 +002158 +002166 +002170 +002172 +002176 +002178 +002179 +002180 +002182 +002186 +002187 +002191 +002192 +002193 +002194 +002196 +002197 +002199 +002208 +002212 +002215 +002219 +002221 +002224 +002234 +002237 +002238 +002241 +002247 +002249 +002253 +002255 +002256 +002260 +002265 +002277 +002279 +002280 +002284 +002287 +002291 +002293 +002306 +002307 +002310 +002311 +002315 +002318 +002320 +002321 +002323 +002334 +002335 +002342 +002347 +002350 +002354 +002355 +002359 +002362 +002368 +002373 +002384 +002392 +002401 +002403 +002405 +002410 +002411 +002413 +002419 +002420 +002423 +002433 +002436 +002439 +002442 +002443 +002445 +002448 +002458 +002461 +002465 +002466 +002468 +002471 +002472 +002478 +002480 +002481 +002483 +002490 +002494 +002496 +002500 +002501 +002502 +002512 +002514 +002518 
+002519 +002533 +002534 +002539 +002544 +002545 +002547 +002554 +002555 +002558 +002559 +002564 +002569 +002571 +002572 +002579 +002590 +002594 +002595 +002599 +002603 +002609 +002611 +002625 +002627 +002634 +002635 +002645 +002647 +002648 +002653 +002662 +002664 +002666 +002669 +002680 +002682 +002683 +002684 +002691 +002697 +002702 +002704 +002710 +002713 +002715 +002722 +002730 +002735 +002737 +002738 +002744 +002745 +002749 +002755 +002757 +002759 +002763 +002765 +002766 +002774 +002778 +002779 +002782 +002783 +002791 +002795 +002796 +002801 +002804 +002807 +002816 +002817 +002820 +002826 +002834 +002841 +002844 +002845 +002848 +002855 +002858 +002864 +002866 +002867 +002868 +002869 +002870 +002873 +002881 +002899 +002906 +002914 +002919 +002931 +002934 +002937 +002939 +002953 +002956 +002957 +002958 +002962 +002969 +002975 +002976 +002987 +002988 +002989 +002990 +002992 +002995 +003002 +003003 +003007 +003011 +003013 +003024 +003027 +003034 +003042 +003047 +003051 +003053 +003061 +003063 +003066 +003074 +003077 +003083 +003085 +003088 +003092 +003100 +003103 +003105 +003106 +003107 +003108 +003110 +003116 +003122 +003124 +003133 +003134 +003135 +003138 +003140 +003145 +003146 +003147 +003149 +003150 +003155 +003157 +003159 +003161 +003163 +003165 +003169 +003175 +003181 +003183 +003184 +003185 +003188 +003202 +003204 +003205 +003211 +003214 +003229 +003231 +003233 +003236 +003240 +003242 +003244 +003247 +003253 +003254 +003259 +003260 +003261 +003269 +003270 +003273 +003279 +003280 +003282 +003284 +003290 +003292 +003303 +003308 +003320 +003330 +003331 +003336 +003337 +003338 +003339 +003343 +003349 +003350 +003354 +003355 +003356 +003359 +003363 +003365 +003367 +003369 +003373 +003374 +003379 +003380 +003382 +003392 +003395 +003396 +003401 +003406 +003408 +003412 +003413 +003416 +003417 +003420 +003421 +003424 +003430 +003433 +003436 +003439 +003441 +003450 +003452 +003466 +003477 +003484 +003487 +003489 +003491 +003493 +003496 +003497 +003499 +003500 +003506 +003508 +003509 +003510 +003511 +003522 +003524 +003525 +003529 +003539 +003548 +003549 +003550 +003551 +003555 +003564 +003565 +003575 +003576 +003577 +003585 +003586 +003588 +003596 +003599 +003603 +003604 +003605 +003608 +003609 +003614 +003621 +003622 +003625 +003627 +003628 +003629 +003634 +003635 +003642 +003644 +003645 +003646 +003656 +003658 +003662 +003663 +003664 +003671 +003678 +003679 +003681 +003688 +003694 +003695 +003698 +003699 +003700 +003704 +003705 +003713 +003714 +003732 +003735 +003740 +003743 +003748 +003749 +003751 +003752 +003758 +003759 +003763 +003767 +003773 +003779 +003781 +003784 +003786 +003788 +003790 +003792 +003797 +003806 +003807 +003811 +003817 +003818 +003824 +003827 +003828 +003830 +003834 +003835 +003847 +003849 +003856 +003859 +003860 +003861 +003865 +003866 +003874 +003879 +003887 +003889 +003890 +003898 +003899 +003907 +003912 +003913 +003921 +003932 +003935 +003936 +003939 +003945 +003949 +003953 +003956 +003961 +003969 +003970 +003971 +003974 +003983 +003987 +003988 +003991 +003993 +003997 +003998 +004005 +004008 +004009 +004012 +004013 +004014 +004016 +004017 +004019 +004023 +004028 +004033 +004034 +004035 +004037 +004046 +004052 +004058 +004067 +004091 +004092 +004093 +004095 +004100 +004106 +004111 +004120 +004121 +004129 +004131 +004133 +004136 +004137 +004138 +004140 +004146 +004149 +004152 +004158 +004163 +004164 +004168 +004169 +004170 +004171 +004189 +004190 +004196 +004200 +004201 +004209 +004215 +004220 +004221 +004223 +004224 +004228 +004231 +004232 +004237 +004241 +004242 
+004244 +004247 +004253 +004255 +004256 +004263 +004269 +004270 +004271 +004272 +004273 +004280 +004281 +004283 +004287 +004291 +004292 +004296 +004300 +004303 +004307 +004315 +004318 +004322 +004325 +004327 +004333 +004338 +004339 +004345 +004347 +004359 +004360 +004361 +004365 +004367 +004370 +004371 +004372 +004376 +004379 +004386 +004387 +004389 +004391 +004392 +004404 +004434 +004436 +004439 +004441 +004452 +004470 +004471 +004479 +004481 +004484 +004496 +004500 +004502 +004508 +004510 +004514 +004517 +004519 +004520 +004524 +004526 +004537 +004540 +004544 +004548 +004549 +004551 +004553 +004562 +004563 +004565 +004566 +004570 +004571 +004576 +004579 +004584 +004587 +004591 +004595 +004597 +004604 +004605 +004607 +004611 +004612 +004622 +004623 +004625 +004627 +004628 +004631 +004634 +004636 +004643 +004644 +004648 +004651 +004656 +004671 +004675 +004679 +004683 +004685 +004686 +004687 +004691 +004693 +004694 +004701 +004702 +004705 +004706 +004710 +004714 +004715 +004718 +004723 +004735 +004737 +004742 +004743 +004747 +004748 +004753 +004754 +004760 +004773 +004776 +004779 +004782 +004783 +004790 +004792 +004793 +004794 +004797 +004799 +004801 +004808 +004815 +004823 +004828 +004830 +004832 +004836 +004837 +004841 +004842 +004846 +004848 +004849 +004857 +004869 +004873 +004876 +004879 +004882 +004885 +004897 +004898 +004902 +004905 +004907 +004910 +004911 +004913 +004929 +004946 +004951 +004955 +004958 +004961 +004962 +004966 +004968 +004972 +004973 +004974 +004976 +004984 +004987 +004990 +004991 +004992 +004995 +005001 +005004 +005006 +005007 +005016 +005018 +005020 +005023 +005024 +005026 +005027 +005029 +005032 +005033 +005045 +005047 +005052 +005057 +005058 +005061 +005065 +005068 +005071 +005073 +005078 +005084 +005086 +005090 +005093 +005094 +005097 +005101 +005107 +005108 +005114 +005121 +005122 +005124 +005129 +005130 +005134 +005138 +005143 +005153 +005156 +005168 +005169 +005171 +005173 +005177 +005181 +005183 +005186 +005189 +005190 +005191 +005202 +005203 +005208 +005215 +005217 +005219 +005223 +005231 +005236 +005244 +005245 +005246 +005257 +005258 +005259 +005260 +005262 +005269 +005273 +005283 +005285 +005288 +005290 +005292 +005297 +005303 +005304 +005307 +005310 +005311 +005318 +005327 +005336 +005337 +005338 +005344 +005345 +005351 +005358 +005360 +005363 +005368 +005369 +005373 +005374 +005387 +005388 +005389 +005391 +005396 +005404 +005405 +005406 +005408 +005410 +005413 +005414 +005417 +005420 +005424 +005433 +005440 +005445 +005448 +005450 +005451 +005453 +005455 +005457 +005467 +005478 +005483 +005487 +005489 +005496 +005499 +005508 +005509 +005511 +005514 +005515 +005519 +005524 +005526 +005527 +005536 +005541 +005542 +005544 +005547 +005563 +005566 +005568 +005574 +005579 +005582 +005585 +005591 +005592 +005599 +005600 +005601 +005603 +005605 +005609 +005611 +005624 +005625 +005630 +005631 +005636 +005637 +005639 +005644 +005648 +005654 +005658 +005668 +005669 +005680 +005686 +005695 +005697 +005699 +005700 +005704 +005705 +005710 +005713 +005715 +005718 +005728 +005730 +005731 +005735 +005738 +005740 +005742 +005752 +005756 +005757 +005764 +005765 +005769 +005780 +005782 +005783 +005784 +005786 +005789 +005796 +005803 +005805 +005806 +005813 +005814 +005817 +005821 +005824 +005826 +005831 +005836 +005838 +005840 +005843 +005850 +005851 +005859 +005860 +005861 +005864 +005867 +005873 +005881 +005884 +005885 +005888 +005889 +005893 +005895 +005899 +005901 +005903 +005905 +005908 +005909 +005910 +005911 +005918 +005920 +005923 +005930 +005938 +005947 +005948 
+005951 +005960 +005961 +005964 +005971 +005980 +005983 +005984 +005990 +005992 +006004 +006009 +006011 +006020 +006023 +006025 +006030 +006033 +006038 +006043 +006061 +006065 +006066 +006067 +006070 +006073 +006074 +006078 +006079 +006088 +006091 +006095 +006096 +006100 +006103 +006104 +006105 +006123 +006128 +006130 +006131 +006134 +006135 +006140 +006141 +006156 +006158 +006162 +006166 +006170 +006171 +006172 +006174 +006175 +006176 +006177 +006179 +006180 +006181 +006183 +006187 +006189 +006196 +006208 +006210 +006221 +006223 +006224 +006225 +006229 +006230 +006236 +006238 +006243 +006247 +006250 +006251 +006261 +006262 +006264 +006267 +006270 +006272 +006275 +006279 +006285 +006289 +006290 +006291 +006299 +006304 +006305 +006320 +006329 +006341 +006344 +006349 +006352 +006353 +006362 +006363 +006366 +006367 +006369 +006371 +006374 +006375 +006381 +006382 +006395 +006400 +006411 +006417 +006418 +006419 +006427 +006429 +006433 +006434 +006436 +006438 +006442 +006447 +006448 +006455 +006458 +006459 +006462 +006466 +006470 +006472 +006474 +006475 +006476 +006482 +006483 +006486 +006495 +006499 +006501 +006503 +006506 +006515 +006523 +006524 +006536 +006547 +006548 +006549 +006550 +006551 +006556 +006560 +006564 +006569 +006595 +006597 +006602 +006605 +006609 +006610 +006612 +006622 +006626 +006627 +006635 +006636 +006637 +006638 +006648 +006652 +006654 +006658 +006660 +006674 +006684 +006689 +006694 +006695 +006697 +006698 +006703 +006704 +006706 +006707 +006708 +006714 +006726 +006727 +006731 +006734 +006735 +006736 +006738 +006740 +006748 +006753 +006755 +006766 +006773 +006777 +006781 +006782 +006784 +006794 +006805 +006806 +006810 +006822 +006824 +006825 +006833 +006836 +006839 +006840 +006844 +006845 +006847 +006848 +006849 +006852 +006858 +006864 +006866 +006868 +006869 +006874 +006883 +006887 +006893 +006896 +006899 +006900 +006909 +006910 +006911 +006912 +006914 +006916 +006917 +006919 +006930 +006931 +006939 +006943 +006947 +006948 +006950 +006958 +006959 +006968 +006971 +006976 +006983 +007002 +007003 +007006 +007007 +007011 +007016 +007018 +007023 +007025 +007029 +007033 +007036 +007039 +007040 +007045 +007050 +007062 +007064 +007072 +007073 +007075 +007078 +007079 +007080 +007088 +007089 +007090 +007092 +007093 +007095 +007105 +007108 +007113 +007121 +007125 +007128 +007129 +007130 +007133 +007138 +007150 +007152 +007154 +007159 +007163 +007166 +007168 +007177 +007180 +007182 +007184 +007185 +007193 +007194 +007197 +007205 +007213 +007214 +007219 +007222 +007223 +007234 +007241 +007243 +007250 +007256 +007261 +007263 +007271 +007279 +007285 +007289 +007295 +007298 +007305 +007308 +007322 +007323 +007325 +007327 +007334 +007336 +007351 +007361 +007365 +007369 +007370 +007373 +007375 +007381 +007385 +007389 +007394 +007396 +007398 +007410 +007411 +007413 +007417 +007419 +007421 +007425 +007431 +007437 +007446 +007454 +007458 +007466 +007467 +007468 +007474 +007477 +007479 +007481 +007483 +007490 +007491 +007493 +007497 +007503 +007513 +007519 +007521 +007524 +007526 +007530 +007535 +007536 +007538 +007540 +007544 +007558 +007565 +007566 +007570 +007572 +007575 +007578 +007586 +007590 +007594 +007600 +007601 +007606 +007611 +007619 +007621 +007629 +007631 +007633 +007637 +007653 +007654 +007655 +007663 +007667 +007683 +007685 +007692 +007696 +007697 +007699 +007704 +007713 +007718 +007721 +007729 +007731 +007735 +007736 +007740 +007748 +007749 +007751 +007753 +007762 +007767 +007775 +007777 +007781 +007790 +007791 +007795 +007803 +007809 +007810 +007814 +007819 +007820 +007821 
+007831 +007836 +007838 +007840 +007847 +007853 +007854 +007859 +007863 +007864 +007872 +007876 +007877 +007878 +007883 +007884 +007885 +007898 +007900 +007901 +007905 +007908 +007910 +007911 +007914 +007915 +007923 +007925 +007926 +007932 +007939 +007940 +007953 +007959 +007963 +007964 +007968 +007974 +007976 +007980 +007991 +007996 +008001 +008004 +008005 +008008 +008012 +008017 +008019 +008026 +008037 +008040 +008042 +008043 +008044 +008049 +008051 +008053 +008062 +008063 +008064 +008067 +008072 +008075 +008076 +008079 +008082 +008083 +008084 +008093 +008095 +008096 +008098 +008106 +008108 +008116 +008117 +008121 +008127 +008130 +008137 +008139 +008142 +008150 +008163 +008164 +008166 +008169 +008174 +008186 +008188 +008197 +008199 +008202 +008203 +008204 +008211 +008213 +008216 +008218 +008223 +008226 +008232 +008235 +008248 +008250 +008252 +008253 +008254 +008260 +008261 +008262 +008263 +008269 +008272 +008280 +008282 +008296 +008301 +008302 +008310 +008311 +008312 +008313 +008315 +008316 +008317 +008322 +008332 +008336 +008338 +008341 +008342 +008346 +008351 +008360 +008372 +008374 +008381 +008384 +008385 +008388 +008391 +008397 +008398 +008403 +008409 +008422 +008425 +008426 +008427 +008437 +008442 +008443 +008445 +008449 +008452 +008453 +008456 +008462 +008465 +008466 +008467 +008468 +008470 +008475 +008477 +008478 +008482 +008483 +008495 +008506 +008517 +008523 +008529 +008530 +008533 +008536 +008549 +008550 +008558 +008559 +008568 +008581 +008585 +008587 +008588 +008595 +008596 +008602 +008610 +008615 +008617 +008618 +008628 +008633 +008645 +008655 +008663 +008665 +008670 +008676 +008688 +008690 +008691 +008699 +008702 +008706 +008710 +008720 +008723 +008725 +008727 +008731 +008732 +008738 +008741 +008744 +008748 +008750 +008755 +008756 +008757 +008760 +008764 +008768 +008770 +008771 +008776 +008783 +008784 +008790 +008794 +008806 +008809 +008811 +008813 +008814 +008815 +008819 +008838 +008840 +008841 +008847 +008856 +008862 +008865 +008872 +008878 +008879 +008883 +008885 +008886 +008891 +008900 +008905 +008909 +008920 +008923 +008926 +008929 +008930 +008932 +008933 +008936 +008939 +008944 +008948 +008958 +008960 +008961 +008962 +008966 +008967 +008968 +008969 +008970 +008971 +008973 +008975 +008978 +008979 +008980 +008985 +008987 +008988 +008989 +008995 +008999 +009000 +009004 +009005 +009016 +009018 +009020 +009027 +009029 +009032 +009036 +009042 +009045 +009049 +009058 +009059 +009063 +009066 +009068 +009073 +009078 +009080 +009086 +009098 +009099 +009100 +009106 +009108 +009114 +009117 +009121 +009123 +009136 +009144 +009148 +009153 +009160 +009161 +009166 +009173 +009175 +009181 +009184 +009185 +009191 +009196 +009197 +009200 +009205 +009208 +009209 +009214 +009215 +009218 +009227 +009230 +009238 +009242 +009245 +009251 +009252 +009255 +009259 +009269 +009270 +009271 +009272 +009283 +009285 +009287 +009288 +009289 +009290 +009295 +009296 +009299 +009306 +009307 +009308 +009316 +009318 +009324 +009325 +009327 +009333 +009336 +009339 +009342 +009343 +009358 +009359 +009362 +009365 +009377 +009386 +009388 +009389 +009392 +009393 +009394 +009398 +009406 +009407 +009409 +009410 +009411 +009413 +009417 +009418 +009419 +009420 +009421 +009422 +009424 +009429 +009432 +009434 +009446 +009458 +009460 +009463 +009465 +009466 +009469 +009476 +009488 +009490 +009491 +009496 +009497 +009499 +009504 +009508 +009512 +009515 +009516 +009518 +009520 +009523 +009524 +009526 +009528 +009537 +009541 +009542 +009545 +009549 +009551 +009557 +009562 +009566 +009573 +009576 +009577 +009579 +009584 
+009585 +009587 +009596 +009600 +009605 +009609 +009613 +009614 +009615 +009618 +009621 +009623 +009629 +009634 +009637 +009638 +009644 +009650 +009654 +009656 +009659 +009664 +009666 +009668 +009671 +009679 +009684 +009691 +009693 +009702 +009703 +009707 +009709 +009713 +009717 +009718 +009721 +009729 +009733 +009734 +009735 +009749 +009755 +009756 +009762 +009763 +009774 +009776 +009789 +009790 +009792 +009797 +009800 +009805 +009807 +009808 +009810 +009813 +009825 +009828 +009830 +009832 +009834 +009839 +009842 +009845 +009848 +009851 +009852 +009855 +009859 +009860 +009867 +009868 +009869 +009872 +009874 +009877 +009878 +009879 +009882 +009884 +009887 +009896 +009904 +009911 +009918 +009920 +009926 +009938 +009940 +009942 +009944 +009945 +009949 +009959 +009961 +2008_000008 +2008_000015 +2008_000019 +2008_000023 +2008_000028 +2008_000033 +2008_000036 +2008_000037 +2008_000041 +2008_000045 +2008_000053 +2008_000060 +2008_000066 +2008_000070 +2008_000074 +2008_000085 +2008_000089 +2008_000093 +2008_000095 +2008_000096 +2008_000097 +2008_000099 +2008_000103 +2008_000105 +2008_000109 +2008_000112 +2008_000128 +2008_000131 +2008_000132 +2008_000141 +2008_000142 +2008_000143 +2008_000144 +2008_000148 +2008_000151 +2008_000154 +2008_000162 +2008_000176 +2008_000181 +2008_000185 +2008_000187 +2008_000188 +2008_000189 +2008_000191 +2008_000192 +2008_000193 +2008_000196 +2008_000197 +2008_000199 +2008_000202 +2008_000207 +2008_000217 +2008_000226 +2008_000227 +2008_000235 +2008_000236 +2008_000237 +2008_000238 +2008_000252 +2008_000255 +2008_000259 +2008_000260 +2008_000262 +2008_000266 +2008_000273 +2008_000275 +2008_000283 +2008_000284 +2008_000287 +2008_000289 +2008_000290 +2008_000291 +2008_000297 +2008_000309 +2008_000311 +2008_000313 +2008_000315 +2008_000316 +2008_000318 +2008_000330 +2008_000335 +2008_000336 +2008_000338 +2008_000342 +2008_000343 +2008_000346 +2008_000348 +2008_000350 +2008_000356 +2008_000361 +2008_000364 +2008_000365 +2008_000371 +2008_000380 +2008_000392 +2008_000393 +2008_000397 +2008_000399 +2008_000400 +2008_000405 +2008_000415 +2008_000416 +2008_000421 +2008_000422 +2008_000426 +2008_000428 +2008_000432 +2008_000435 +2008_000436 +2008_000437 +2008_000442 +2008_000443 +2008_000445 +2008_000447 +2008_000448 +2008_000455 +2008_000461 +2008_000470 +2008_000471 +2008_000480 +2008_000488 +2008_000491 +2008_000493 +2008_000495 +2008_000499 +2008_000502 +2008_000505 +2008_000512 +2008_000514 +2008_000515 +2008_000527 +2008_000531 +2008_000540 +2008_000544 +2008_000545 +2008_000548 +2008_000552 +2008_000559 +2008_000561 +2008_000563 +2008_000567 +2008_000572 +2008_000578 +2008_000583 +2008_000584 +2008_000585 +2008_000588 +2008_000595 +2008_000607 +2008_000613 +2008_000615 +2008_000619 +2008_000626 +2008_000628 +2008_000636 +2008_000641 +2008_000645 +2008_000646 +2008_000648 +2008_000650 +2008_000655 +2008_000669 +2008_000672 +2008_000674 +2008_000676 +2008_000678 +2008_000683 +2008_000689 +2008_000694 +2008_000696 +2008_000703 +2008_000704 +2008_000711 +2008_000716 +2008_000719 +2008_000721 +2008_000723 +2008_000724 +2008_000726 +2008_000729 +2008_000732 +2008_000733 +2008_000742 +2008_000753 +2008_000756 +2008_000758 +2008_000760 +2008_000761 +2008_000764 +2008_000775 +2008_000777 +2008_000778 +2008_000785 +2008_000787 +2008_000790 +2008_000792 +2008_000798 +2008_000801 +2008_000808 +2008_000814 +2008_000815 +2008_000824 +2008_000829 +2008_000832 +2008_000833 +2008_000841 +2008_000842 +2008_000844 +2008_000847 +2008_000851 +2008_000854 +2008_000860 +2008_000861 
+2008_000867 +2008_000870 +2008_000873 +2008_000875 +2008_000881 +2008_000883 +2008_000887 +2008_000899 +2008_000901 +2008_000902 +2008_000905 +2008_000908 +2008_000912 +2008_000914 +2008_000915 +2008_000923 +2008_000924 +2008_000928 +2008_000934 +2008_000941 +2008_000944 +2008_000953 +2008_000959 +2008_000970 +2008_000973 +2008_000979 +2008_000981 +2008_000985 +2008_000987 +2008_000999 +2008_001018 +2008_001020 +2008_001021 +2008_001022 +2008_001023 +2008_001026 +2008_001030 +2008_001031 +2008_001035 +2008_001036 +2008_001039 +2008_001042 +2008_001047 +2008_001048 +2008_001052 +2008_001054 +2008_001056 +2008_001057 +2008_001071 +2008_001073 +2008_001081 +2008_001083 +2008_001104 +2008_001105 +2008_001106 +2008_001112 +2008_001115 +2008_001118 +2008_001119 +2008_001130 +2008_001133 +2008_001134 +2008_001137 +2008_001143 +2008_001147 +2008_001158 +2008_001159 +2008_001161 +2008_001164 +2008_001169 +2008_001171 +2008_001182 +2008_001188 +2008_001189 +2008_001190 +2008_001196 +2008_001202 +2008_001203 +2008_001206 +2008_001208 +2008_001215 +2008_001219 +2008_001223 +2008_001230 +2008_001235 +2008_001238 +2008_001245 +2008_001263 +2008_001267 +2008_001272 +2008_001274 +2008_001278 +2008_001285 +2008_001294 +2008_001299 +2008_001302 +2008_001307 +2008_001310 +2008_001312 +2008_001325 +2008_001329 +2008_001335 +2008_001336 +2008_001346 +2008_001351 +2008_001357 +2008_001358 +2008_001359 +2008_001373 +2008_001375 +2008_001382 +2008_001383 +2008_001385 +2008_001387 +2008_001389 +2008_001390 +2008_001399 +2008_001402 +2008_001405 +2008_001408 +2008_001413 +2008_001414 +2008_001419 +2008_001420 +2008_001431 +2008_001434 +2008_001440 +2008_001444 +2008_001446 +2008_001448 +2008_001454 +2008_001455 +2008_001460 +2008_001461 +2008_001462 +2008_001464 +2008_001467 +2008_001479 +2008_001482 +2008_001488 +2008_001493 +2008_001495 +2008_001498 +2008_001500 +2008_001501 +2008_001510 +2008_001523 +2008_001525 +2008_001529 +2008_001533 +2008_001538 +2008_001541 +2008_001550 +2008_001563 +2008_001566 +2008_001576 +2008_001577 +2008_001582 +2008_001591 +2008_001592 +2008_001601 +2008_001609 +2008_001610 +2008_001615 +2008_001617 +2008_001620 +2008_001626 +2008_001631 +2008_001632 +2008_001641 +2008_001643 +2008_001645 +2008_001652 +2008_001653 +2008_001661 +2008_001666 +2008_001670 +2008_001673 +2008_001679 +2008_001690 +2008_001691 +2008_001692 +2008_001694 +2008_001699 +2008_001704 +2008_001706 +2008_001708 +2008_001709 +2008_001710 +2008_001716 +2008_001719 +2008_001724 +2008_001729 +2008_001735 +2008_001737 +2008_001741 +2008_001744 +2008_001746 +2008_001751 +2008_001758 +2008_001761 +2008_001770 +2008_001775 +2008_001781 +2008_001783 +2008_001787 +2008_001789 +2008_001791 +2008_001796 +2008_001797 +2008_001801 +2008_001809 +2008_001811 +2008_001813 +2008_001829 +2008_001832 +2008_001834 +2008_001836 +2008_001837 +2008_001842 +2008_001845 +2008_001849 +2008_001852 +2008_001854 +2008_001856 +2008_001860 +2008_001865 +2008_001866 +2008_001872 +2008_001876 +2008_001880 +2008_001881 +2008_001882 +2008_001888 +2008_001894 +2008_001896 +2008_001903 +2008_001911 +2008_001921 +2008_001926 +2008_001929 +2008_001937 +2008_001941 +2008_001947 +2008_001955 +2008_001956 +2008_001957 +2008_001967 +2008_001970 +2008_001977 +2008_001980 +2008_001982 +2008_001986 +2008_001997 +2008_002000 +2008_002001 +2008_002002 +2008_002005 +2008_002009 +2008_002023 +2008_002032 +2008_002033 +2008_002056 +2008_002058 +2008_002061 +2008_002062 +2008_002064 +2008_002066 +2008_002067 +2008_002073 +2008_002079 +2008_002080 +2008_002093 
+2008_002094 +2008_002096 +2008_002103 +2008_002112 +2008_002116 +2008_002117 +2008_002118 +2008_002119 +2008_002123 +2008_002129 +2008_002131 +2008_002145 +2008_002148 +2008_002150 +2008_002156 +2008_002160 +2008_002162 +2008_002175 +2008_002177 +2008_002181 +2008_002182 +2008_002195 +2008_002197 +2008_002200 +2008_002202 +2008_002204 +2008_002206 +2008_002208 +2008_002210 +2008_002215 +2008_002218 +2008_002220 +2008_002221 +2008_002225 +2008_002227 +2008_002229 +2008_002236 +2008_002243 +2008_002244 +2008_002247 +2008_002248 +2008_002251 +2008_002255 +2008_002258 +2008_002262 +2008_002270 +2008_002278 +2008_002279 +2008_002280 +2008_002281 +2008_002288 +2008_002294 +2008_002296 +2008_002299 +2008_002304 +2008_002307 +2008_002311 +2008_002317 +2008_002325 +2008_002327 +2008_002329 +2008_002331 +2008_002335 +2008_002338 +2008_002340 +2008_002343 +2008_002344 +2008_002350 +2008_002357 +2008_002361 +2008_002362 +2008_002365 +2008_002368 +2008_002369 +2008_002370 +2008_002377 +2008_002389 +2008_002399 +2008_002405 +2008_002411 +2008_002418 +2008_002422 +2008_002425 +2008_002434 +2008_002437 +2008_002441 +2008_002442 +2008_002448 +2008_002457 +2008_002458 +2008_002459 +2008_002461 +2008_002465 +2008_002466 +2008_002471 +2008_002473 +2008_002482 +2008_002484 +2008_002487 +2008_002491 +2008_002501 +2008_002506 +2008_002514 +2008_002515 +2008_002524 +2008_002533 +2008_002541 +2008_002543 +2008_002547 +2008_002549 +2008_002551 +2008_002555 +2008_002562 +2008_002566 +2008_002568 +2008_002574 +2008_002575 +2008_002578 +2008_002583 +2008_002584 +2008_002601 +2008_002612 +2008_002613 +2008_002621 +2008_002622 +2008_002625 +2008_002634 +2008_002638 +2008_002641 +2008_002647 +2008_002648 +2008_002649 +2008_002650 +2008_002662 +2008_002665 +2008_002666 +2008_002668 +2008_002672 +2008_002674 +2008_002675 +2008_002676 +2008_002679 +2008_002686 +2008_002697 +2008_002698 +2008_002704 +2008_002710 +2008_002712 +2008_002718 +2008_002719 +2008_002728 +2008_002730 +2008_002733 +2008_002736 +2008_002741 +2008_002749 +2008_002750 +2008_002758 +2008_002760 +2008_002762 +2008_002767 +2008_002772 +2008_002774 +2008_002776 +2008_002784 +2008_002787 +2008_002791 +2008_002793 +2008_002794 +2008_002804 +2008_002808 +2008_002813 +2008_002823 +2008_002834 +2008_002842 +2008_002848 +2008_002850 +2008_002854 +2008_002856 +2008_002857 +2008_002866 +2008_002868 +2008_002872 +2008_002873 +2008_002880 +2008_002885 +2008_002887 +2008_002890 +2008_002891 +2008_002892 +2008_002894 +2008_002903 +2008_002913 +2008_002916 +2008_002917 +2008_002922 +2008_002930 +2008_002931 +2008_002943 +2008_002948 +2008_002951 +2008_002954 +2008_002955 +2008_002957 +2008_002960 +2008_002961 +2008_002966 +2008_002970 +2008_002972 +2008_002977 +2008_002983 +2008_002984 +2008_002985 +2008_002988 +2008_002993 +2008_002997 +2008_003013 +2008_003015 +2008_003017 +2008_003018 +2008_003021 +2008_003023 +2008_003025 +2008_003033 +2008_003037 +2008_003039 +2008_003041 +2008_003043 +2008_003048 +2008_003049 +2008_003057 +2008_003059 +2008_003060 +2008_003061 +2008_003063 +2008_003065 +2008_003068 +2008_003075 +2008_003079 +2008_003081 +2008_003083 +2008_003087 +2008_003093 +2008_003094 +2008_003099 +2008_003101 +2008_003112 +2008_003114 +2008_003120 +2008_003122 +2008_003127 +2008_003128 +2008_003134 +2008_003140 +2008_003143 +2008_003146 +2008_003147 +2008_003151 +2008_003154 +2008_003157 +2008_003160 +2008_003168 +2008_003180 +2008_003182 +2008_003191 +2008_003196 +2008_003200 +2008_003203 +2008_003208 +2008_003209 +2008_003213 +2008_003224 +2008_003231 
+2008_003242 +2008_003244 +2008_003248 +2008_003251 +2008_003252 +2008_003261 +2008_003264 +2008_003265 +2008_003266 +2008_003269 +2008_003272 +2008_003275 +2008_003276 +2008_003277 +2008_003283 +2008_003287 +2008_003288 +2008_003290 +2008_003297 +2008_003302 +2008_003303 +2008_003304 +2008_003311 +2008_003313 +2008_003318 +2008_003321 +2008_003323 +2008_003329 +2008_003335 +2008_003338 +2008_003342 +2008_003347 +2008_003360 +2008_003362 +2008_003373 +2008_003378 +2008_003380 +2008_003381 +2008_003386 +2008_003393 +2008_003394 +2008_003406 +2008_003409 +2008_003414 +2008_003415 +2008_003417 +2008_003418 +2008_003426 +2008_003429 +2008_003430 +2008_003434 +2008_003435 +2008_003437 +2008_003447 +2008_003448 +2008_003452 +2008_003458 +2008_003462 +2008_003463 +2008_003469 +2008_003478 +2008_003480 +2008_003485 +2008_003488 +2008_003489 +2008_003496 +2008_003497 +2008_003498 +2008_003500 +2008_003501 +2008_003504 +2008_003507 +2008_003510 +2008_003515 +2008_003520 +2008_003521 +2008_003522 +2008_003523 +2008_003533 +2008_003534 +2008_003544 +2008_003559 +2008_003560 +2008_003562 +2008_003571 +2008_003575 +2008_003578 +2008_003579 +2008_003582 +2008_003585 +2008_003587 +2008_003589 +2008_003590 +2008_003596 +2008_003608 +2008_003611 +2008_003617 +2008_003622 +2008_003626 +2008_003629 +2008_003635 +2008_003637 +2008_003645 +2008_003652 +2008_003653 +2008_003655 +2008_003659 +2008_003665 +2008_003667 +2008_003674 +2008_003675 +2008_003677 +2008_003682 +2008_003685 +2008_003688 +2008_003689 +2008_003691 +2008_003697 +2008_003701 +2008_003703 +2008_003706 +2008_003707 +2008_003712 +2008_003719 +2008_003726 +2008_003729 +2008_003732 +2008_003746 +2008_003748 +2008_003761 +2008_003762 +2008_003764 +2008_003769 +2008_003773 +2008_003774 +2008_003776 +2008_003779 +2008_003781 +2008_003788 +2008_003791 +2008_003796 +2008_003802 +2008_003811 +2008_003814 +2008_003815 +2008_003819 +2008_003831 +2008_003841 +2008_003842 +2008_003847 +2008_003849 +2008_003852 +2008_003854 +2008_003864 +2008_003866 +2008_003870 +2008_003871 +2008_003882 +2008_003883 +2008_003888 +2008_003891 +2008_003892 +2008_003908 +2008_003913 +2008_003914 +2008_003916 +2008_003920 +2008_003922 +2008_003925 +2008_003939 +2008_003942 +2008_003947 +2008_003956 +2008_003966 +2008_003967 +2008_003970 +2008_003974 +2008_003975 +2008_003978 +2008_003984 +2008_003985 +2008_003986 +2008_003992 +2008_003995 +2008_003998 +2008_004000 +2008_004004 +2008_004008 +2008_004014 +2008_004017 +2008_004021 +2008_004022 +2008_004024 +2008_004026 +2008_004036 +2008_004037 +2008_004042 +2008_004044 +2008_004053 +2008_004055 +2008_004066 +2008_004074 +2008_004077 +2008_004080 +2008_004084 +2008_004087 +2008_004092 +2008_004097 +2008_004100 +2008_004102 +2008_004106 +2008_004110 +2008_004112 +2008_004113 +2008_004120 +2008_004122 +2008_004130 +2008_004134 +2008_004138 +2008_004145 +2008_004147 +2008_004148 +2008_004161 +2008_004163 +2008_004165 +2008_004171 +2008_004176 +2008_004195 +2008_004196 +2008_004201 +2008_004208 +2008_004217 +2008_004218 +2008_004224 +2008_004231 +2008_004232 +2008_004235 +2008_004239 +2008_004246 +2008_004247 +2008_004259 +2008_004265 +2008_004269 +2008_004274 +2008_004276 +2008_004280 +2008_004284 +2008_004287 +2008_004288 +2008_004291 +2008_004293 +2008_004296 +2008_004301 +2008_004303 +2008_004307 +2008_004313 +2008_004314 +2008_004319 +2008_004321 +2008_004325 +2008_004328 +2008_004331 +2008_004342 +2008_004353 +2008_004358 +2008_004362 +2008_004365 +2008_004372 +2008_004376 +2008_004378 +2008_004380 +2008_004385 +2008_004387 
+2008_004398 +2008_004403 +2008_004410 +2008_004411 +2008_004412 +2008_004416 +2008_004418 +2008_004428 +2008_004430 +2008_004435 +2008_004436 +2008_004439 +2008_004441 +2008_004443 +2008_004450 +2008_004452 +2008_004457 +2008_004458 +2008_004462 +2008_004480 +2008_004488 +2008_004490 +2008_004492 +2008_004493 +2008_004499 +2008_004505 +2008_004506 +2008_004512 +2008_004513 +2008_004515 +2008_004518 +2008_004519 +2008_004532 +2008_004539 +2008_004544 +2008_004545 +2008_004547 +2008_004551 +2008_004559 +2008_004567 +2008_004568 +2008_004570 +2008_004574 +2008_004581 +2008_004583 +2008_004584 +2008_004585 +2008_004588 +2008_004590 +2008_004593 +2008_004602 +2008_004603 +2008_004607 +2008_004611 +2008_004616 +2008_004617 +2008_004620 +2008_004629 +2008_004631 +2008_004634 +2008_004635 +2008_004636 +2008_004648 +2008_004649 +2008_004661 +2008_004663 +2008_004666 +2008_004667 +2008_004668 +2008_004671 +2008_004672 +2008_004677 +2008_004678 +2008_004679 +2008_004690 +2008_004692 +2008_004697 +2008_004703 +2008_004707 +2008_004713 +2008_004719 +2008_004725 +2008_004732 +2008_004739 +2008_004749 +2008_004750 +2008_004752 +2008_004763 +2008_004764 +2008_004767 +2008_004770 +2008_004776 +2008_004777 +2008_004781 +2008_004783 +2008_004786 +2008_004802 +2008_004804 +2008_004807 +2008_004808 +2008_004821 +2008_004822 +2008_004827 +2008_004834 +2008_004838 +2008_004841 +2008_004844 +2008_004845 +2008_004847 +2008_004849 +2008_004850 +2008_004856 +2008_004858 +2008_004868 +2008_004869 +2008_004872 +2008_004874 +2008_004876 +2008_004892 +2008_004893 +2008_004899 +2008_004903 +2008_004908 +2008_004911 +2008_004914 +2008_004917 +2008_004920 +2008_004931 +2008_004934 +2008_004937 +2008_004938 +2008_004945 +2008_004946 +2008_004950 +2008_004961 +2008_004964 +2008_004966 +2008_004969 +2008_004970 +2008_004973 +2008_004976 +2008_004977 +2008_004981 +2008_004983 +2008_004985 +2008_004990 +2008_004991 +2008_004998 +2008_005000 +2008_005006 +2008_005013 +2008_005016 +2008_005033 +2008_005036 +2008_005040 +2008_005042 +2008_005045 +2008_005051 +2008_005055 +2008_005064 +2008_005066 +2008_005071 +2008_005074 +2008_005078 +2008_005080 +2008_005081 +2008_005082 +2008_005084 +2008_005088 +2008_005090 +2008_005094 +2008_005101 +2008_005108 +2008_005115 +2008_005127 +2008_005132 +2008_005133 +2008_005134 +2008_005136 +2008_005137 +2008_005146 +2008_005150 +2008_005158 +2008_005159 +2008_005168 +2008_005171 +2008_005172 +2008_005174 +2008_005178 +2008_005186 +2008_005193 +2008_005196 +2008_005201 +2008_005209 +2008_005213 +2008_005214 +2008_005216 +2008_005218 +2008_005220 +2008_005221 +2008_005231 +2008_005234 +2008_005236 +2008_005240 +2008_005247 +2008_005248 +2008_005250 +2008_005266 +2008_005269 +2008_005271 +2008_005279 +2008_005281 +2008_005283 +2008_005294 +2008_005295 +2008_005296 +2008_005297 +2008_005300 +2008_005303 +2008_005310 +2008_005315 +2008_005321 +2008_005324 +2008_005325 +2008_005329 +2008_005331 +2008_005333 +2008_005336 +2008_005342 +2008_005345 +2008_005349 +2008_005350 +2008_005354 +2008_005357 +2008_005362 +2008_005363 +2008_005365 +2008_005367 +2008_005375 +2008_005376 +2008_005380 +2008_005382 +2008_005386 +2008_005389 +2008_005395 +2008_005396 +2008_005400 +2008_005405 +2008_005408 +2008_005412 +2008_005414 +2008_005415 +2008_005429 +2008_005433 +2008_005443 +2008_005449 +2008_005451 +2008_005456 +2008_005463 +2008_005465 +2008_005473 +2008_005477 +2008_005484 +2008_005491 +2008_005494 +2008_005496 +2008_005500 +2008_005502 +2008_005505 +2008_005507 +2008_005512 +2008_005514 +2008_005517 
+2008_005519 +2008_005521 +2008_005523 +2008_005526 +2008_005527 +2008_005531 +2008_005536 +2008_005541 +2008_005549 +2008_005558 +2008_005560 +2008_005561 +2008_005567 +2008_005569 +2008_005570 +2008_005572 +2008_005584 +2008_005589 +2008_005591 +2008_005593 +2008_005600 +2008_005603 +2008_005609 +2008_005610 +2008_005616 +2008_005618 +2008_005623 +2008_005625 +2008_005626 +2008_005634 +2008_005636 +2008_005639 +2008_005641 +2008_005650 +2008_005653 +2008_005656 +2008_005668 +2008_005673 +2008_005675 +2008_005678 +2008_005679 +2008_005682 +2008_005683 +2008_005698 +2008_005705 +2008_005706 +2008_005707 +2008_005713 +2008_005714 +2008_005716 +2008_005719 +2008_005724 +2008_005728 +2008_005736 +2008_005737 +2008_005739 +2008_005742 +2008_005747 +2008_005752 +2008_005757 +2008_005758 +2008_005761 +2008_005767 +2008_005770 +2008_005780 +2008_005791 +2008_005794 +2008_005800 +2008_005803 +2008_005810 +2008_005817 +2008_005818 +2008_005822 +2008_005823 +2008_005832 +2008_005834 +2008_005839 +2008_005843 +2008_005845 +2008_005850 +2008_005853 +2008_005855 +2008_005856 +2008_005867 +2008_005871 +2008_005873 +2008_005874 +2008_005878 +2008_005882 +2008_005890 +2008_005891 +2008_005893 +2008_005897 +2008_005902 +2008_005903 +2008_005905 +2008_005916 +2008_005921 +2008_005923 +2008_005926 +2008_005929 +2008_005935 +2008_005937 +2008_005938 +2008_005945 +2008_005954 +2008_005956 +2008_005959 +2008_005960 +2008_005967 +2008_005968 +2008_005972 +2008_005976 +2008_005979 +2008_005982 +2008_005991 +2008_005997 +2008_006000 +2008_006004 +2008_006014 +2008_006020 +2008_006032 +2008_006039 +2008_006046 +2008_006049 +2008_006062 +2008_006064 +2008_006065 +2008_006067 +2008_006070 +2008_006074 +2008_006076 +2008_006078 +2008_006081 +2008_006085 +2008_006090 +2008_006092 +2008_006096 +2008_006099 +2008_006102 +2008_006111 +2008_006119 +2008_006121 +2008_006124 +2008_006128 +2008_006129 +2008_006133 +2008_006135 +2008_006136 +2008_006140 +2008_006145 +2008_006152 +2008_006158 +2008_006164 +2008_006170 +2008_006181 +2008_006182 +2008_006186 +2008_006188 +2008_006192 +2008_006194 +2008_006195 +2008_006210 +2008_006211 +2008_006213 +2008_006215 +2008_006220 +2008_006221 +2008_006224 +2008_006225 +2008_006232 +2008_006234 +2008_006235 +2008_006242 +2008_006244 +2008_006249 +2008_006250 +2008_006253 +2008_006256 +2008_006257 +2008_006258 +2008_006265 +2008_006271 +2008_006272 +2008_006273 +2008_006276 +2008_006280 +2008_006281 +2008_006289 +2008_006294 +2008_006295 +2008_006300 +2008_006315 +2008_006317 +2008_006323 +2008_006329 +2008_006331 +2008_006335 +2008_006336 +2008_006339 +2008_006345 +2008_006349 +2008_006350 +2008_006351 +2008_006353 +2008_006355 +2008_006361 +2008_006364 +2008_006365 +2008_006369 +2008_006370 +2008_006373 +2008_006376 +2008_006384 +2008_006386 +2008_006387 +2008_006389 +2008_006390 +2008_006400 +2008_006401 +2008_006404 +2008_006410 +2008_006417 +2008_006419 +2008_006421 +2008_006427 +2008_006430 +2008_006432 +2008_006433 +2008_006434 +2008_006436 +2008_006447 +2008_006448 +2008_006461 +2008_006462 +2008_006470 +2008_006474 +2008_006475 +2008_006481 +2008_006482 +2008_006483 +2008_006488 +2008_006490 +2008_006491 +2008_006496 +2008_006497 +2008_006500 +2008_006506 +2008_006509 +2008_006511 +2008_006512 +2008_006522 +2008_006538 +2008_006540 +2008_006543 +2008_006546 +2008_006549 +2008_006558 +2008_006561 +2008_006562 +2008_006564 +2008_006566 +2008_006567 +2008_006570 +2008_006578 +2008_006579 +2008_006585 +2008_006586 +2008_006598 +2008_006599 +2008_006602 +2008_006606 +2008_006610 
+2008_006613 +2008_006619 +2008_006623 +2008_006624 +2008_006625 +2008_006626 +2008_006629 +2008_006634 +2008_006637 +2008_006638 +2008_006641 +2008_006645 +2008_006649 +2008_006650 +2008_006654 +2008_006655 +2008_006657 +2008_006663 +2008_006667 +2008_006668 +2008_006677 +2008_006682 +2008_006691 +2008_006692 +2008_006700 +2008_006705 +2008_006712 +2008_006715 +2008_006717 +2008_006718 +2008_006719 +2008_006720 +2008_006724 +2008_006728 +2008_006730 +2008_006733 +2008_006737 +2008_006748 +2008_006750 +2008_006751 +2008_006753 +2008_006761 +2008_006762 +2008_006764 +2008_006767 +2008_006778 +2008_006785 +2008_006802 +2008_006807 +2008_006808 +2008_006810 +2008_006818 +2008_006819 +2008_006820 +2008_006827 +2008_006832 +2008_006834 +2008_006843 +2008_006847 +2008_006857 +2008_006864 +2008_006865 +2008_006868 +2008_006872 +2008_006873 +2008_006877 +2008_006879 +2008_006881 +2008_006882 +2008_006889 +2008_006898 +2008_006902 +2008_006903 +2008_006908 +2008_006909 +2008_006910 +2008_006919 +2008_006920 +2008_006921 +2008_006923 +2008_006926 +2008_006933 +2008_006936 +2008_006946 +2008_006950 +2008_006953 +2008_006954 +2008_006960 +2008_006961 +2008_006962 +2008_006965 +2008_006969 +2008_006973 +2008_006992 +2008_007003 +2008_007004 +2008_007009 +2008_007011 +2008_007012 +2008_007014 +2008_007022 +2008_007026 +2008_007028 +2008_007030 +2008_007038 +2008_007039 +2008_007043 +2008_007045 +2008_007054 +2008_007058 +2008_007060 +2008_007061 +2008_007069 +2008_007073 +2008_007075 +2008_007076 +2008_007081 +2008_007082 +2008_007085 +2008_007090 +2008_007095 +2008_007097 +2008_007098 +2008_007101 +2008_007106 +2008_007115 +2008_007118 +2008_007124 +2008_007129 +2008_007131 +2008_007138 +2008_007142 +2008_007145 +2008_007146 +2008_007147 +2008_007151 +2008_007156 +2008_007161 +2008_007165 +2008_007168 +2008_007169 +2008_007179 +2008_007185 +2008_007197 +2008_007201 +2008_007205 +2008_007208 +2008_007211 +2008_007217 +2008_007218 +2008_007221 +2008_007223 +2008_007226 +2008_007236 +2008_007237 +2008_007239 +2008_007242 +2008_007245 +2008_007246 +2008_007252 +2008_007254 +2008_007260 +2008_007261 +2008_007265 +2008_007274 +2008_007280 +2008_007281 +2008_007286 +2008_007289 +2008_007291 +2008_007298 +2008_007307 +2008_007312 +2008_007313 +2008_007320 +2008_007321 +2008_007325 +2008_007335 +2008_007343 +2008_007346 +2008_007353 +2008_007356 +2008_007357 +2008_007361 +2008_007363 +2008_007364 +2008_007375 +2008_007382 +2008_007383 +2008_007388 +2008_007394 +2008_007397 +2008_007410 +2008_007421 +2008_007423 +2008_007424 +2008_007425 +2008_007428 +2008_007432 +2008_007433 +2008_007438 +2008_007442 +2008_007443 +2008_007444 +2008_007448 +2008_007456 +2008_007465 +2008_007469 +2008_007470 +2008_007471 +2008_007472 +2008_007473 +2008_007477 +2008_007485 +2008_007486 +2008_007491 +2008_007496 +2008_007500 +2008_007504 +2008_007509 +2008_007510 +2008_007511 +2008_007515 +2008_007519 +2008_007524 +2008_007528 +2008_007533 +2008_007537 +2008_007544 +2008_007546 +2008_007556 +2008_007559 +2008_007565 +2008_007573 +2008_007576 +2008_007581 +2008_007584 +2008_007588 +2008_007589 +2008_007593 +2008_007597 +2008_007604 +2008_007608 +2008_007611 +2008_007613 +2008_007621 +2008_007625 +2008_007629 +2008_007630 +2008_007640 +2008_007641 +2008_007646 +2008_007648 +2008_007653 +2008_007660 +2008_007664 +2008_007665 +2008_007666 +2008_007675 +2008_007682 +2008_007683 +2008_007691 +2008_007692 +2008_007696 +2008_007697 +2008_007698 +2008_007701 +2008_007709 +2008_007710 +2008_007717 +2008_007724 +2008_007726 +2008_007730 
+2008_007742 +2008_007746 +2008_007748 +2008_007750 +2008_007752 +2008_007755 +2008_007758 +2008_007759 +2008_007761 +2008_007770 +2008_007777 +2008_007779 +2008_007780 +2008_007781 +2008_007786 +2008_007788 +2008_007789 +2008_007805 +2008_007812 +2008_007817 +2008_007825 +2008_007829 +2008_007833 +2008_007835 +2008_007837 +2008_007840 +2008_007842 +2008_007843 +2008_007848 +2008_007852 +2008_007858 +2008_007861 +2008_007864 +2008_007869 +2008_007870 +2008_007873 +2008_007877 +2008_007879 +2008_007882 +2008_007883 +2008_007891 +2008_007895 +2008_007897 +2008_007904 +2008_007907 +2008_007909 +2008_007912 +2008_007913 +2008_007916 +2008_007918 +2008_007928 +2008_007937 +2008_007938 +2008_007940 +2008_007941 +2008_007947 +2008_007950 +2008_007953 +2008_007962 +2008_007969 +2008_007973 +2008_007975 +2008_007977 +2008_007985 +2008_007987 +2008_007988 +2008_007990 +2008_007997 +2008_007998 +2008_007999 +2008_008002 +2008_008004 +2008_008007 +2008_008012 +2008_008018 +2008_008020 +2008_008021 +2008_008028 +2008_008031 +2008_008034 +2008_008037 +2008_008043 +2008_008048 +2008_008058 +2008_008064 +2008_008070 +2008_008072 +2008_008073 +2008_008074 +2008_008080 +2008_008083 +2008_008092 +2008_008095 +2008_008097 +2008_008098 +2008_008106 +2008_008112 +2008_008116 +2008_008121 +2008_008122 +2008_008125 +2008_008132 +2008_008147 +2008_008148 +2008_008150 +2008_008152 +2008_008154 +2008_008162 +2008_008166 +2008_008169 +2008_008170 +2008_008176 +2008_008180 +2008_008184 +2008_008193 +2008_008194 +2008_008197 +2008_008199 +2008_008200 +2008_008206 +2008_008211 +2008_008212 +2008_008215 +2008_008218 +2008_008220 +2008_008223 +2008_008227 +2008_008229 +2008_008237 +2008_008242 +2008_008247 +2008_008262 +2008_008263 +2008_008266 +2008_008274 +2008_008275 +2008_008276 +2008_008281 +2008_008287 +2008_008288 +2008_008294 +2008_008300 +2008_008309 +2008_008315 +2008_008319 +2008_008321 +2008_008323 +2008_008324 +2008_008325 +2008_008330 +2008_008338 +2008_008342 +2008_008343 +2008_008344 +2008_008345 +2008_008347 +2008_008356 +2008_008363 +2008_008364 +2008_008366 +2008_008368 +2008_008370 +2008_008382 +2008_008384 +2008_008391 +2008_008402 +2008_008403 +2008_008404 +2008_008410 +2008_008411 +2008_008416 +2008_008423 +2008_008428 +2008_008431 +2008_008432 +2008_008440 +2008_008447 +2008_008455 +2008_008462 +2008_008464 +2008_008471 +2008_008476 +2008_008479 +2008_008480 +2008_008482 +2008_008487 +2008_008490 +2008_008496 +2008_008497 +2008_008507 +2008_008508 +2008_008511 +2008_008517 +2008_008521 +2008_008522 +2008_008523 +2008_008525 +2008_008526 +2008_008528 +2008_008530 +2008_008533 +2008_008541 +2008_008544 +2008_008545 +2008_008546 +2008_008547 +2008_008549 +2008_008550 +2008_008560 +2008_008567 +2008_008572 +2008_008578 +2008_008579 +2008_008583 +2008_008589 +2008_008590 +2008_008591 +2008_008593 +2008_008600 +2008_008601 +2008_008607 +2008_008608 +2008_008616 +2008_008618 +2008_008623 +2008_008624 +2008_008635 +2008_008637 +2008_008641 +2008_008642 +2008_008649 +2008_008654 +2008_008665 +2008_008666 +2008_008668 +2008_008671 +2008_008673 +2008_008674 +2008_008681 +2008_008685 +2008_008689 +2008_008691 +2008_008694 +2008_008696 +2008_008697 +2008_008701 +2008_008706 +2008_008707 +2008_008714 +2008_008717 +2008_008719 +2008_008725 +2008_008735 +2008_008744 +2008_008745 +2008_008748 +2008_008749 +2008_008757 +2008_008770 +2008_008773 +2009_000006 +2009_000010 +2009_000014 +2009_000015 +2009_000016 +2009_000021 +2009_000027 +2009_000028 +2009_000029 +2009_000030 +2009_000040 +2009_000042 +2009_000052 
+2009_000054 +2009_000056 +2009_000058 +2009_000059 +2009_000073 +2009_000082 +2009_000085 +2009_000088 +2009_000091 +2009_000100 +2009_000103 +2009_000104 +2009_000105 +2009_000109 +2009_000119 +2009_000120 +2009_000122 +2009_000128 +2009_000130 +2009_000131 +2009_000132 +2009_000133 +2009_000135 +2009_000137 +2009_000140 +2009_000141 +2009_000145 +2009_000150 +2009_000151 +2009_000159 +2009_000160 +2009_000161 +2009_000164 +2009_000168 +2009_000176 +2009_000177 +2009_000188 +2009_000195 +2009_000197 +2009_000203 +2009_000209 +2009_000217 +2009_000218 +2009_000223 +2009_000227 +2009_000229 +2009_000232 +2009_000233 +2009_000237 +2009_000239 +2009_000248 +2009_000250 +2009_000251 +2009_000253 +2009_000268 +2009_000277 +2009_000280 +2009_000281 +2009_000285 +2009_000287 +2009_000289 +2009_000290 +2009_000303 +2009_000317 +2009_000320 +2009_000322 +2009_000327 +2009_000336 +2009_000339 +2009_000340 +2009_000341 +2009_000343 +2009_000344 +2009_000347 +2009_000350 +2009_000367 +2009_000375 +2009_000377 +2009_000379 +2009_000385 +2009_000390 +2009_000393 +2009_000400 +2009_000405 +2009_000408 +2009_000409 +2009_000416 +2009_000419 +2009_000420 +2009_000438 +2009_000439 +2009_000443 +2009_000444 +2009_000445 +2009_000449 +2009_000452 +2009_000454 +2009_000463 +2009_000464 +2009_000471 +2009_000474 +2009_000476 +2009_000477 +2009_000486 +2009_000491 +2009_000493 +2009_000494 +2009_000500 +2009_000502 +2009_000503 +2009_000504 +2009_000505 +2009_000515 +2009_000522 +2009_000525 +2009_000527 +2009_000529 +2009_000532 +2009_000535 +2009_000539 +2009_000544 +2009_000546 +2009_000547 +2009_000553 +2009_000557 +2009_000560 +2009_000562 +2009_000565 +2009_000567 +2009_000575 +2009_000576 +2009_000577 +2009_000579 +2009_000585 +2009_000586 +2009_000591 +2009_000592 +2009_000595 +2009_000599 +2009_000600 +2009_000602 +2009_000603 +2009_000604 +2009_000617 +2009_000626 +2009_000629 +2009_000632 +2009_000635 +2009_000636 +2009_000638 +2009_000642 +2009_000651 +2009_000653 +2009_000655 +2009_000662 +2009_000663 +2009_000672 +2009_000679 +2009_000684 +2009_000686 +2009_000690 +2009_000692 +2009_000694 +2009_000695 +2009_000696 +2009_000708 +2009_000709 +2009_000718 +2009_000720 +2009_000722 +2009_000737 +2009_000744 +2009_000745 +2009_000746 +2009_000748 +2009_000750 +2009_000752 +2009_000755 +2009_000757 +2009_000759 +2009_000768 +2009_000770 +2009_000774 +2009_000777 +2009_000789 +2009_000790 +2009_000793 +2009_000794 +2009_000796 +2009_000797 +2009_000801 +2009_000804 +2009_000805 +2009_000815 +2009_000816 +2009_000831 +2009_000833 +2009_000834 +2009_000848 +2009_000849 +2009_000854 +2009_000867 +2009_000869 +2009_000874 +2009_000882 +2009_000887 +2009_000889 +2009_000894 +2009_000895 +2009_000899 +2009_000902 +2009_000906 +2009_000910 +2009_000915 +2009_000920 +2009_000926 +2009_000927 +2009_000930 +2009_000932 +2009_000937 +2009_000938 +2009_000945 +2009_000953 +2009_000961 +2009_000962 +2009_000967 +2009_000969 +2009_000970 +2009_000973 +2009_000974 +2009_000975 +2009_000979 +2009_000980 +2009_000981 +2009_000987 +2009_000990 +2009_000996 +2009_001002 +2009_001009 +2009_001012 +2009_001013 +2009_001019 +2009_001027 +2009_001036 +2009_001037 +2009_001040 +2009_001042 +2009_001052 +2009_001056 +2009_001059 +2009_001068 +2009_001070 +2009_001074 +2009_001078 +2009_001079 +2009_001081 +2009_001085 +2009_001091 +2009_001095 +2009_001096 +2009_001098 +2009_001100 +2009_001102 +2009_001103 +2009_001104 +2009_001105 +2009_001107 +2009_001110 +2009_001111 +2009_001117 +2009_001124 +2009_001129 +2009_001133 
+2009_001135 +2009_001137 +2009_001138 +2009_001140 +2009_001145 +2009_001146 +2009_001147 +2009_001151 +2009_001152 +2009_001153 +2009_001154 +2009_001159 +2009_001163 +2009_001172 +2009_001177 +2009_001180 +2009_001188 +2009_001190 +2009_001192 +2009_001197 +2009_001199 +2009_001201 +2009_001203 +2009_001205 +2009_001206 +2009_001208 +2009_001216 +2009_001217 +2009_001221 +2009_001224 +2009_001229 +2009_001230 +2009_001236 +2009_001237 +2009_001238 +2009_001241 +2009_001251 +2009_001253 +2009_001254 +2009_001260 +2009_001263 +2009_001264 +2009_001268 +2009_001270 +2009_001271 +2009_001282 +2009_001283 +2009_001285 +2009_001291 +2009_001301 +2009_001303 +2009_001305 +2009_001306 +2009_001308 +2009_001311 +2009_001312 +2009_001319 +2009_001323 +2009_001327 +2009_001328 +2009_001329 +2009_001339 +2009_001344 +2009_001354 +2009_001357 +2009_001359 +2009_001360 +2009_001364 +2009_001368 +2009_001369 +2009_001372 +2009_001374 +2009_001375 +2009_001376 +2009_001385 +2009_001388 +2009_001389 +2009_001390 +2009_001395 +2009_001403 +2009_001412 +2009_001422 +2009_001424 +2009_001434 +2009_001435 +2009_001443 +2009_001444 +2009_001446 +2009_001448 +2009_001450 +2009_001452 +2009_001453 +2009_001457 +2009_001462 +2009_001463 +2009_001466 +2009_001472 +2009_001474 +2009_001475 +2009_001476 +2009_001480 +2009_001481 +2009_001493 +2009_001494 +2009_001500 +2009_001502 +2009_001507 +2009_001508 +2009_001514 +2009_001516 +2009_001517 +2009_001537 +2009_001538 +2009_001541 +2009_001542 +2009_001544 +2009_001546 +2009_001550 +2009_001553 +2009_001555 +2009_001558 +2009_001566 +2009_001567 +2009_001570 +2009_001585 +2009_001589 +2009_001590 +2009_001595 +2009_001598 +2009_001602 +2009_001605 +2009_001608 +2009_001611 +2009_001612 +2009_001614 +2009_001615 +2009_001625 +2009_001636 +2009_001638 +2009_001640 +2009_001642 +2009_001651 +2009_001657 +2009_001660 +2009_001664 +2009_001670 +2009_001671 +2009_001674 +2009_001676 +2009_001677 +2009_001678 +2009_001689 +2009_001690 +2009_001693 +2009_001695 +2009_001704 +2009_001705 +2009_001706 +2009_001715 +2009_001719 +2009_001720 +2009_001724 +2009_001732 +2009_001734 +2009_001735 +2009_001740 +2009_001744 +2009_001746 +2009_001747 +2009_001749 +2009_001750 +2009_001751 +2009_001755 +2009_001770 +2009_001779 +2009_001781 +2009_001782 +2009_001783 +2009_001792 +2009_001798 +2009_001800 +2009_001801 +2009_001802 +2009_001806 +2009_001807 +2009_001809 +2009_001812 +2009_001817 +2009_001825 +2009_001826 +2009_001827 +2009_001828 +2009_001831 +2009_001837 +2009_001840 +2009_001846 +2009_001847 +2009_001856 +2009_001861 +2009_001865 +2009_001867 +2009_001868 +2009_001869 +2009_001871 +2009_001873 +2009_001874 +2009_001875 +2009_001884 +2009_001885 +2009_001888 +2009_001894 +2009_001897 +2009_001898 +2009_001902 +2009_001904 +2009_001908 +2009_001910 +2009_001917 +2009_001922 +2009_001926 +2009_001927 +2009_001933 +2009_001934 +2009_001937 +2009_001948 +2009_001952 +2009_001959 +2009_001960 +2009_001961 +2009_001962 +2009_001964 +2009_001972 +2009_001975 +2009_001990 +2009_001994 +2009_001997 +2009_001999 +2009_002000 +2009_002010 +2009_002018 +2009_002019 +2009_002037 +2009_002040 +2009_002044 +2009_002052 +2009_002054 +2009_002057 +2009_002060 +2009_002064 +2009_002066 +2009_002072 +2009_002077 +2009_002083 +2009_002086 +2009_002088 +2009_002089 +2009_002093 +2009_002096 +2009_002098 +2009_002099 +2009_002103 +2009_002104 +2009_002105 +2009_002107 +2009_002112 +2009_002116 +2009_002117 +2009_002118 +2009_002119 +2009_002120 +2009_002123 +2009_002126 +2009_002129 
+2009_002133 +2009_002145 +2009_002146 +2009_002147 +2009_002149 +2009_002151 +2009_002152 +2009_002153 +2009_002173 +2009_002176 +2009_002180 +2009_002182 +2009_002192 +2009_002193 +2009_002197 +2009_002198 +2009_002203 +2009_002204 +2009_002214 +2009_002216 +2009_002225 +2009_002229 +2009_002235 +2009_002236 +2009_002240 +2009_002245 +2009_002253 +2009_002254 +2009_002256 +2009_002258 +2009_002259 +2009_002262 +2009_002264 +2009_002271 +2009_002273 +2009_002274 +2009_002281 +2009_002285 +2009_002289 +2009_002297 +2009_002298 +2009_002299 +2009_002301 +2009_002311 +2009_002312 +2009_002314 +2009_002324 +2009_002326 +2009_002331 +2009_002338 +2009_002339 +2009_002343 +2009_002348 +2009_002352 +2009_002358 +2009_002362 +2009_002371 +2009_002376 +2009_002377 +2009_002381 +2009_002386 +2009_002387 +2009_002388 +2009_002391 +2009_002397 +2009_002404 +2009_002406 +2009_002408 +2009_002409 +2009_002416 +2009_002419 +2009_002422 +2009_002423 +2009_002424 +2009_002425 +2009_002429 +2009_002431 +2009_002434 +2009_002438 +2009_002439 +2009_002443 +2009_002448 +2009_002452 +2009_002456 +2009_002460 +2009_002472 +2009_002504 +2009_002505 +2009_002506 +2009_002514 +2009_002519 +2009_002522 +2009_002523 +2009_002530 +2009_002536 +2009_002542 +2009_002543 +2009_002553 +2009_002556 +2009_002557 +2009_002558 +2009_002559 +2009_002561 +2009_002565 +2009_002567 +2009_002577 +2009_002579 +2009_002585 +2009_002586 +2009_002588 +2009_002595 +2009_002597 +2009_002599 +2009_002605 +2009_002611 +2009_002612 +2009_002613 +2009_002615 +2009_002616 +2009_002620 +2009_002621 +2009_002624 +2009_002625 +2009_002626 +2009_002628 +2009_002629 +2009_002648 +2009_002652 +2009_002659 +2009_002662 +2009_002671 +2009_002672 +2009_002674 +2009_002676 +2009_002685 +2009_002688 +2009_002689 +2009_002695 +2009_002697 +2009_002703 +2009_002704 +2009_002705 +2009_002710 +2009_002713 +2009_002714 +2009_002715 +2009_002719 +2009_002725 +2009_002728 +2009_002734 +2009_002746 +2009_002750 +2009_002758 +2009_002759 +2009_002763 +2009_002764 +2009_002770 +2009_002780 +2009_002784 +2009_002789 +2009_002791 +2009_002792 +2009_002798 +2009_002799 +2009_002813 +2009_002814 +2009_002817 +2009_002820 +2009_002824 +2009_002827 +2009_002831 +2009_002835 +2009_002842 +2009_002843 +2009_002844 +2009_002845 +2009_002847 +2009_002849 +2009_002850 +2009_002851 +2009_002853 +2009_002855 +2009_002862 +2009_002867 +2009_002869 +2009_002872 +2009_002879 +2009_002885 +2009_002890 +2009_002893 +2009_002897 +2009_002901 +2009_002908 +2009_002912 +2009_002914 +2009_002917 +2009_002921 +2009_002932 +2009_002933 +2009_002935 +2009_002937 +2009_002946 +2009_002947 +2009_002952 +2009_002954 +2009_002955 +2009_002957 +2009_002958 +2009_002961 +2009_002970 +2009_002971 +2009_002972 +2009_002976 +2009_002980 +2009_002983 +2009_002984 +2009_002988 +2009_002993 +2009_002999 +2009_003000 +2009_003002 +2009_003006 +2009_003007 +2009_003012 +2009_003019 +2009_003032 +2009_003034 +2009_003035 +2009_003039 +2009_003042 +2009_003053 +2009_003054 +2009_003056 +2009_003064 +2009_003066 +2009_003067 +2009_003068 +2009_003075 +2009_003077 +2009_003078 +2009_003082 +2009_003087 +2009_003088 +2009_003090 +2009_003091 +2009_003093 +2009_003095 +2009_003107 +2009_003108 +2009_003109 +2009_003115 +2009_003116 +2009_003118 +2009_003127 +2009_003138 +2009_003142 +2009_003146 +2009_003147 +2009_003155 +2009_003156 +2009_003157 +2009_003164 +2009_003165 +2009_003166 +2009_003168 +2009_003172 +2009_003173 +2009_003175 +2009_003187 +2009_003200 +2009_003208 +2009_003209 +2009_003218 
+2009_003219 +2009_003222 +2009_003225 +2009_003229 +2009_003232 +2009_003233 +2009_003234 +2009_003249 +2009_003253 +2009_003257 +2009_003261 +2009_003265 +2009_003267 +2009_003272 +2009_003277 +2009_003285 +2009_003290 +2009_003309 +2009_003310 +2009_003315 +2009_003316 +2009_003317 +2009_003326 +2009_003327 +2009_003333 +2009_003338 +2009_003340 +2009_003345 +2009_003349 +2009_003350 +2009_003352 +2009_003353 +2009_003360 +2009_003361 +2009_003363 +2009_003365 +2009_003367 +2009_003369 +2009_003377 +2009_003381 +2009_003383 +2009_003384 +2009_003385 +2009_003386 +2009_003395 +2009_003396 +2009_003402 +2009_003407 +2009_003416 +2009_003419 +2009_003425 +2009_003430 +2009_003436 +2009_003443 +2009_003446 +2009_003447 +2009_003454 +2009_003455 +2009_003458 +2009_003459 +2009_003461 +2009_003468 +2009_003482 +2009_003488 +2009_003489 +2009_003490 +2009_003492 +2009_003497 +2009_003510 +2009_003511 +2009_003513 +2009_003519 +2009_003520 +2009_003522 +2009_003524 +2009_003531 +2009_003533 +2009_003534 +2009_003539 +2009_003540 +2009_003541 +2009_003545 +2009_003546 +2009_003555 +2009_003562 +2009_003563 +2009_003572 +2009_003577 +2009_003583 +2009_003594 +2009_003600 +2009_003601 +2009_003605 +2009_003608 +2009_003609 +2009_003613 +2009_003614 +2009_003624 +2009_003629 +2009_003634 +2009_003636 +2009_003639 +2009_003644 +2009_003646 +2009_003647 +2009_003652 +2009_003654 +2009_003657 +2009_003660 +2009_003663 +2009_003667 +2009_003668 +2009_003677 +2009_003683 +2009_003685 +2009_003688 +2009_003690 +2009_003694 +2009_003695 +2009_003697 +2009_003702 +2009_003705 +2009_003708 +2009_003709 +2009_003711 +2009_003717 +2009_003720 +2009_003722 +2009_003732 +2009_003734 +2009_003735 +2009_003736 +2009_003739 +2009_003743 +2009_003752 +2009_003753 +2009_003757 +2009_003760 +2009_003765 +2009_003768 +2009_003775 +2009_003783 +2009_003784 +2009_003786 +2009_003790 +2009_003793 +2009_003799 +2009_003801 +2009_003808 +2009_003815 +2009_003816 +2009_003818 +2009_003819 +2009_003820 +2009_003825 +2009_003827 +2009_003829 +2009_003837 +2009_003838 +2009_003843 +2009_003846 +2009_003848 +2009_003852 +2009_003860 +2009_003865 +2009_003867 +2009_003873 +2009_003883 +2009_003888 +2009_003896 +2009_003897 +2009_003900 +2009_003912 +2009_003913 +2009_003920 +2009_003921 +2009_003922 +2009_003933 +2009_003942 +2009_003956 +2009_003958 +2009_003961 +2009_003966 +2009_003974 +2009_003975 +2009_003976 +2009_003985 +2009_003993 +2009_003994 +2009_004002 +2009_004005 +2009_004007 +2009_004012 +2009_004018 +2009_004020 +2009_004023 +2009_004025 +2009_004037 +2009_004042 +2009_004055 +2009_004058 +2009_004069 +2009_004073 +2009_004074 +2009_004078 +2009_004082 +2009_004083 +2009_004088 +2009_004091 +2009_004094 +2009_004095 +2009_004096 +2009_004100 +2009_004103 +2009_004105 +2009_004109 +2009_004112 +2009_004117 +2009_004118 +2009_004121 +2009_004122 +2009_004133 +2009_004134 +2009_004139 +2009_004153 +2009_004154 +2009_004159 +2009_004162 +2009_004165 +2009_004168 +2009_004169 +2009_004171 +2009_004173 +2009_004174 +2009_004176 +2009_004177 +2009_004178 +2009_004179 +2009_004180 +2009_004181 +2009_004183 +2009_004186 +2009_004187 +2009_004191 +2009_004199 +2009_004200 +2009_004201 +2009_004202 +2009_004211 +2009_004212 +2009_004213 +2009_004218 +2009_004222 +2009_004225 +2009_004227 +2009_004228 +2009_004229 +2009_004231 +2009_004234 +2009_004244 +2009_004249 +2009_004261 +2009_004264 +2009_004271 +2009_004276 +2009_004278 +2009_004279 +2009_004283 +2009_004285 +2009_004289 +2009_004290 +2009_004295 +2009_004301 
+2009_004308 +2009_004312 +2009_004315 +2009_004316 +2009_004317 +2009_004319 +2009_004322 +2009_004323 +2009_004327 +2009_004328 +2009_004334 +2009_004336 +2009_004338 +2009_004340 +2009_004341 +2009_004347 +2009_004351 +2009_004357 +2009_004358 +2009_004368 +2009_004369 +2009_004370 +2009_004371 +2009_004374 +2009_004375 +2009_004383 +2009_004392 +2009_004394 +2009_004397 +2009_004399 +2009_004406 +2009_004409 +2009_004417 +2009_004424 +2009_004425 +2009_004426 +2009_004429 +2009_004432 +2009_004434 +2009_004438 +2009_004442 +2009_004444 +2009_004445 +2009_004446 +2009_004449 +2009_004451 +2009_004452 +2009_004454 +2009_004464 +2009_004465 +2009_004475 +2009_004477 +2009_004479 +2009_004486 +2009_004492 +2009_004501 +2009_004503 +2009_004508 +2009_004511 +2009_004513 +2009_004514 +2009_004519 +2009_004527 +2009_004539 +2009_004545 +2009_004547 +2009_004554 +2009_004557 +2009_004560 +2009_004561 +2009_004562 +2009_004565 +2009_004570 +2009_004571 +2009_004572 +2009_004582 +2009_004593 +2009_004598 +2009_004606 +2009_004616 +2009_004619 +2009_004620 +2009_004626 +2009_004628 +2009_004631 +2009_004639 +2009_004642 +2009_004643 +2009_004647 +2009_004651 +2009_004652 +2009_004656 +2009_004661 +2009_004662 +2009_004667 +2009_004671 +2009_004674 +2009_004681 +2009_004683 +2009_004684 +2009_004688 +2009_004694 +2009_004701 +2009_004705 +2009_004708 +2009_004709 +2009_004710 +2009_004719 +2009_004723 +2009_004728 +2009_004731 +2009_004734 +2009_004737 +2009_004745 +2009_004756 +2009_004759 +2009_004760 +2009_004761 +2009_004764 +2009_004766 +2009_004771 +2009_004772 +2009_004779 +2009_004786 +2009_004787 +2009_004790 +2009_004794 +2009_004797 +2009_004798 +2009_004804 +2009_004805 +2009_004806 +2009_004813 +2009_004815 +2009_004817 +2009_004824 +2009_004829 +2009_004830 +2009_004831 +2009_004836 +2009_004839 +2009_004846 +2009_004847 +2009_004855 +2009_004871 +2009_004874 +2009_004877 +2009_004880 +2009_004887 +2009_004888 +2009_004890 +2009_004898 +2009_004901 +2009_004903 +2009_004904 +2009_004905 +2009_004907 +2009_004914 +2009_004919 +2009_004921 +2009_004926 +2009_004939 +2009_004943 +2009_004944 +2009_004945 +2009_004953 +2009_004958 +2009_004959 +2009_004962 +2009_004965 +2009_004972 +2009_004975 +2009_004977 +2009_004979 +2009_004980 +2009_004983 +2009_004984 +2009_004986 +2009_004990 +2009_004999 +2009_005000 +2009_005006 +2009_005015 +2009_005016 +2009_005024 +2009_005030 +2009_005031 +2009_005035 +2009_005037 +2009_005040 +2009_005042 +2009_005044 +2009_005045 +2009_005051 +2009_005055 +2009_005056 +2009_005057 +2009_005069 +2009_005070 +2009_005075 +2009_005076 +2009_005081 +2009_005084 +2009_005085 +2009_005094 +2009_005095 +2009_005102 +2009_005107 +2009_005118 +2009_005120 +2009_005126 +2009_005127 +2009_005128 +2009_005129 +2009_005130 +2009_005131 +2009_005133 +2009_005141 +2009_005142 +2009_005144 +2009_005145 +2009_005147 +2009_005154 +2009_005155 +2009_005160 +2009_005162 +2009_005163 +2009_005168 +2009_005170 +2009_005177 +2009_005181 +2009_005183 +2009_005191 +2009_005194 +2009_005198 +2009_005201 +2009_005218 +2009_005234 +2009_005236 +2009_005240 +2009_005246 +2009_005247 +2009_005251 +2009_005256 +2009_005263 +2009_005265 +2009_005269 +2009_005272 +2009_005278 +2009_005282 +2009_005287 +2009_005293 +2009_005297 +2009_005303 +2009_005307 +2009_005308 +2009_005311 +2010_000002 +2010_000009 +2010_000014 +2010_000018 +2010_000023 +2010_000026 +2010_000031 +2010_000043 +2010_000045 +2010_000048 +2010_000052 +2010_000055 +2010_000056 +2010_000061 +2010_000063 +2010_000067 
+2010_000071 +2010_000073 +2010_000075 +2010_000076 +2010_000079 +2010_000080 +2010_000082 +2010_000089 +2010_000091 +2010_000103 +2010_000109 +2010_000111 +2010_000114 +2010_000117 +2010_000120 +2010_000124 +2010_000131 +2010_000132 +2010_000133 +2010_000136 +2010_000137 +2010_000138 +2010_000141 +2010_000148 +2010_000152 +2010_000157 +2010_000165 +2010_000169 +2010_000177 +2010_000182 +2010_000183 +2010_000187 +2010_000189 +2010_000190 +2010_000195 +2010_000198 +2010_000203 +2010_000204 +2010_000209 +2010_000222 +2010_000224 +2010_000227 +2010_000229 +2010_000233 +2010_000234 +2010_000244 +2010_000245 +2010_000248 +2010_000249 +2010_000250 +2010_000255 +2010_000263 +2010_000264 +2010_000269 +2010_000270 +2010_000276 +2010_000285 +2010_000293 +2010_000296 +2010_000299 +2010_000302 +2010_000303 +2010_000307 +2010_000310 +2010_000320 +2010_000323 +2010_000329 +2010_000337 +2010_000347 +2010_000356 +2010_000361 +2010_000362 +2010_000371 +2010_000377 +2010_000386 +2010_000388 +2010_000389 +2010_000392 +2010_000393 +2010_000394 +2010_000395 +2010_000404 +2010_000413 +2010_000415 +2010_000419 +2010_000420 +2010_000432 +2010_000436 +2010_000437 +2010_000439 +2010_000447 +2010_000448 +2010_000453 +2010_000458 +2010_000459 +2010_000463 +2010_000465 +2010_000466 +2010_000469 +2010_000473 +2010_000477 +2010_000480 +2010_000484 +2010_000488 +2010_000490 +2010_000492 +2010_000495 +2010_000498 +2010_000500 +2010_000503 +2010_000508 +2010_000511 +2010_000513 +2010_000519 +2010_000522 +2010_000527 +2010_000534 +2010_000538 +2010_000545 +2010_000549 +2010_000556 +2010_000557 +2010_000561 +2010_000564 +2010_000567 +2010_000568 +2010_000571 +2010_000576 +2010_000577 +2010_000578 +2010_000581 +2010_000588 +2010_000591 +2010_000601 +2010_000613 +2010_000616 +2010_000626 +2010_000630 +2010_000632 +2010_000641 +2010_000644 +2010_000645 +2010_000648 +2010_000651 +2010_000658 +2010_000661 +2010_000664 +2010_000667 +2010_000671 +2010_000674 +2010_000675 +2010_000678 +2010_000681 +2010_000685 +2010_000687 +2010_000688 +2010_000691 +2010_000694 +2010_000702 +2010_000707 +2010_000710 +2010_000715 +2010_000716 +2010_000717 +2010_000721 +2010_000723 +2010_000739 +2010_000740 +2010_000746 +2010_000747 +2010_000748 +2010_000750 +2010_000760 +2010_000765 +2010_000769 +2010_000770 +2010_000772 +2010_000773 +2010_000782 +2010_000785 +2010_000787 +2010_000799 +2010_000800 +2010_000803 +2010_000806 +2010_000807 +2010_000808 +2010_000810 +2010_000815 +2010_000827 +2010_000837 +2010_000838 +2010_000842 +2010_000847 +2010_000849 +2010_000855 +2010_000857 +2010_000860 +2010_000862 +2010_000863 +2010_000871 +2010_000872 +2010_000879 +2010_000885 +2010_000887 +2010_000891 +2010_000899 +2010_000908 +2010_000910 +2010_000912 +2010_000914 +2010_000920 +2010_000922 +2010_000926 +2010_000938 +2010_000939 +2010_000942 +2010_000954 +2010_000970 +2010_000971 +2010_000974 +2010_000978 +2010_000979 +2010_000983 +2010_000984 +2010_000986 +2010_000991 +2010_000994 +2010_000995 +2010_001002 +2010_001012 +2010_001013 +2010_001020 +2010_001023 +2010_001025 +2010_001039 +2010_001043 +2010_001044 +2010_001054 +2010_001063 +2010_001066 +2010_001074 +2010_001076 +2010_001087 +2010_001092 +2010_001094 +2010_001098 +2010_001100 +2010_001103 +2010_001105 +2010_001106 +2010_001110 +2010_001111 +2010_001112 +2010_001113 +2010_001118 +2010_001120 +2010_001121 +2010_001123 +2010_001126 +2010_001131 +2010_001134 +2010_001139 +2010_001140 +2010_001142 +2010_001143 +2010_001148 +2010_001152 +2010_001154 +2010_001159 +2010_001160 +2010_001175 +2010_001177 
+2010_001179 +2010_001183 +2010_001184 +2010_001185 +2010_001193 +2010_001195 +2010_001199 +2010_001205 +2010_001210 +2010_001211 +2010_001212 +2010_001224 +2010_001225 +2010_001237 +2010_001240 +2010_001245 +2010_001247 +2010_001250 +2010_001253 +2010_001254 +2010_001261 +2010_001271 +2010_001273 +2010_001274 +2010_001275 +2010_001277 +2010_001279 +2010_001282 +2010_001288 +2010_001289 +2010_001299 +2010_001310 +2010_001311 +2010_001312 +2010_001317 +2010_001320 +2010_001328 +2010_001329 +2010_001337 +2010_001338 +2010_001339 +2010_001344 +2010_001347 +2010_001356 +2010_001360 +2010_001361 +2010_001363 +2010_001366 +2010_001370 +2010_001372 +2010_001374 +2010_001383 +2010_001385 +2010_001386 +2010_001390 +2010_001395 +2010_001397 +2010_001399 +2010_001401 +2010_001402 +2010_001406 +2010_001408 +2010_001410 +2010_001413 +2010_001418 +2010_001422 +2010_001425 +2010_001430 +2010_001431 +2010_001433 +2010_001434 +2010_001435 +2010_001450 +2010_001456 +2010_001457 +2010_001458 +2010_001464 +2010_001465 +2010_001472 +2010_001478 +2010_001480 +2010_001481 +2010_001487 +2010_001489 +2010_001499 +2010_001503 +2010_001511 +2010_001514 +2010_001515 +2010_001529 +2010_001533 +2010_001537 +2010_001547 +2010_001550 +2010_001551 +2010_001552 +2010_001555 +2010_001560 +2010_001561 +2010_001562 +2010_001569 +2010_001572 +2010_001576 +2010_001580 +2010_001583 +2010_001590 +2010_001592 +2010_001594 +2010_001595 +2010_001596 +2010_001599 +2010_001602 +2010_001603 +2010_001607 +2010_001608 +2010_001618 +2010_001619 +2010_001626 +2010_001630 +2010_001638 +2010_001644 +2010_001647 +2010_001649 +2010_001650 +2010_001660 +2010_001665 +2010_001674 +2010_001676 +2010_001687 +2010_001689 +2010_001694 +2010_001698 +2010_001700 +2010_001706 +2010_001709 +2010_001710 +2010_001715 +2010_001718 +2010_001719 +2010_001726 +2010_001729 +2010_001732 +2010_001743 +2010_001744 +2010_001746 +2010_001747 +2010_001748 +2010_001753 +2010_001756 +2010_001759 +2010_001762 +2010_001770 +2010_001776 +2010_001780 +2010_001784 +2010_001785 +2010_001794 +2010_001795 +2010_001797 +2010_001801 +2010_001806 +2010_001807 +2010_001808 +2010_001810 +2010_001817 +2010_001841 +2010_001842 +2010_001846 +2010_001849 +2010_001850 +2010_001852 +2010_001853 +2010_001856 +2010_001858 +2010_001860 +2010_001864 +2010_001870 +2010_001881 +2010_001884 +2010_001885 +2010_001896 +2010_001899 +2010_001911 +2010_001919 +2010_001922 +2010_001923 +2010_001924 +2010_001931 +2010_001933 +2010_001934 +2010_001939 +2010_001940 +2010_001941 +2010_001944 +2010_001948 +2010_001957 +2010_001960 +2010_001970 +2010_001973 +2010_001974 +2010_001976 +2010_001978 +2010_001979 +2010_001980 +2010_001981 +2010_001982 +2010_001993 +2010_001994 +2010_002015 +2010_002018 +2010_002020 +2010_002023 +2010_002026 +2010_002032 +2010_002037 +2010_002039 +2010_002042 +2010_002044 +2010_002045 +2010_002047 +2010_002054 +2010_002055 +2010_002057 +2010_002065 +2010_002068 +2010_002070 +2010_002080 +2010_002095 +2010_002097 +2010_002104 +2010_002107 +2010_002118 +2010_002121 +2010_002127 +2010_002129 +2010_002130 +2010_002132 +2010_002136 +2010_002139 +2010_002141 +2010_002143 +2010_002149 +2010_002152 +2010_002154 +2010_002166 +2010_002168 +2010_002176 +2010_002177 +2010_002179 +2010_002180 +2010_002185 +2010_002191 +2010_002193 +2010_002203 +2010_002204 +2010_002207 +2010_002208 +2010_002215 +2010_002216 +2010_002218 +2010_002220 +2010_002221 +2010_002226 +2010_002227 +2010_002236 +2010_002242 +2010_002243 +2010_002248 +2010_002254 +2010_002263 +2010_002267 +2010_002274 +2010_002278 
+2010_002286 +2010_002295 +2010_002299 +2010_002301 +2010_002309 +2010_002312 +2010_002318 +2010_002320 +2010_002327 +2010_002333 +2010_002338 +2010_002346 +2010_002349 +2010_002353 +2010_002356 +2010_002363 +2010_002364 +2010_002368 +2010_002369 +2010_002371 +2010_002374 +2010_002378 +2010_002379 +2010_002382 +2010_002387 +2010_002391 +2010_002392 +2010_002393 +2010_002399 +2010_002400 +2010_002410 +2010_002413 +2010_002418 +2010_002424 +2010_002425 +2010_002429 +2010_002431 +2010_002435 +2010_002438 +2010_002439 +2010_002440 +2010_002445 +2010_002452 +2010_002455 +2010_002456 +2010_002457 +2010_002459 +2010_002462 +2010_002469 +2010_002472 +2010_002475 +2010_002485 +2010_002487 +2010_002492 +2010_002496 +2010_002497 +2010_002498 +2010_002499 +2010_002501 +2010_002507 +2010_002509 +2010_002513 +2010_002520 +2010_002527 +2010_002529 +2010_002532 +2010_002537 +2010_002551 +2010_002552 +2010_002553 +2010_002556 +2010_002562 +2010_002567 +2010_002570 +2010_002573 +2010_002575 +2010_002577 +2010_002582 +2010_002583 +2010_002589 +2010_002592 +2010_002594 +2010_002614 +2010_002615 +2010_002616 +2010_002618 +2010_002620 +2010_002624 +2010_002625 +2010_002626 +2010_002628 +2010_002642 +2010_002644 +2010_002647 +2010_002653 +2010_002656 +2010_002659 +2010_002662 +2010_002665 +2010_002674 +2010_002675 +2010_002684 +2010_002686 +2010_002688 +2010_002692 +2010_002696 +2010_002697 +2010_002702 +2010_002708 +2010_002720 +2010_002722 +2010_002729 +2010_002733 +2010_002734 +2010_002742 +2010_002746 +2010_002747 +2010_002750 +2010_002752 +2010_002759 +2010_002760 +2010_002772 +2010_002778 +2010_002779 +2010_002781 +2010_002786 +2010_002794 +2010_002797 +2010_002801 +2010_002805 +2010_002811 +2010_002813 +2010_002815 +2010_002816 +2010_002820 +2010_002821 +2010_002830 +2010_002831 +2010_002834 +2010_002838 +2010_002839 +2010_002841 +2010_002842 +2010_002843 +2010_002844 +2010_002851 +2010_002855 +2010_002856 +2010_002857 +2010_002865 +2010_002870 +2010_002880 +2010_002884 +2010_002891 +2010_002892 +2010_002896 +2010_002899 +2010_002901 +2010_002903 +2010_002907 +2010_002909 +2010_002915 +2010_002917 +2010_002931 +2010_002935 +2010_002937 +2010_002938 +2010_002941 +2010_002946 +2010_002947 +2010_002948 +2010_002955 +2010_002962 +2010_002973 +2010_002976 +2010_002978 +2010_002979 +2010_002982 +2010_002987 +2010_002990 +2010_003002 +2010_003003 +2010_003007 +2010_003010 +2010_003011 +2010_003013 +2010_003017 +2010_003025 +2010_003027 +2010_003028 +2010_003032 +2010_003034 +2010_003035 +2010_003037 +2010_003044 +2010_003047 +2010_003050 +2010_003053 +2010_003055 +2010_003056 +2010_003057 +2010_003062 +2010_003077 +2010_003078 +2010_003084 +2010_003086 +2010_003088 +2010_003093 +2010_003094 +2010_003097 +2010_003101 +2010_003106 +2010_003108 +2010_003114 +2010_003115 +2010_003117 +2010_003119 +2010_003137 +2010_003138 +2010_003143 +2010_003148 +2010_003149 +2010_003151 +2010_003153 +2010_003157 +2010_003159 +2010_003162 +2010_003169 +2010_003170 +2010_003173 +2010_003174 +2010_003179 +2010_003185 +2010_003186 +2010_003191 +2010_003192 +2010_003197 +2010_003203 +2010_003204 +2010_003206 +2010_003218 +2010_003222 +2010_003227 +2010_003230 +2010_003238 +2010_003241 +2010_003250 +2010_003252 +2010_003255 +2010_003256 +2010_003259 +2010_003263 +2010_003264 +2010_003269 +2010_003274 +2010_003280 +2010_003283 +2010_003290 +2010_003291 +2010_003297 +2010_003300 +2010_003301 +2010_003304 +2010_003305 +2010_003309 +2010_003329 +2010_003332 +2010_003333 +2010_003337 +2010_003342 +2010_003343 +2010_003344 +2010_003345 
+2010_003350 +2010_003351 +2010_003353 +2010_003355 +2010_003367 +2010_003370 +2010_003371 +2010_003372 +2010_003374 +2010_003380 +2010_003383 +2010_003384 +2010_003391 +2010_003395 +2010_003400 +2010_003405 +2010_003406 +2010_003415 +2010_003421 +2010_003432 +2010_003435 +2010_003436 +2010_003437 +2010_003439 +2010_003469 +2010_003474 +2010_003477 +2010_003478 +2010_003481 +2010_003483 +2010_003491 +2010_003507 +2010_003509 +2010_003512 +2010_003513 +2010_003526 +2010_003529 +2010_003534 +2010_003535 +2010_003538 +2010_003539 +2010_003546 +2010_003549 +2010_003551 +2010_003554 +2010_003556 +2010_003560 +2010_003567 +2010_003574 +2010_003576 +2010_003582 +2010_003592 +2010_003598 +2010_003599 +2010_003601 +2010_003604 +2010_003608 +2010_003612 +2010_003618 +2010_003625 +2010_003629 +2010_003634 +2010_003635 +2010_003643 +2010_003644 +2010_003648 +2010_003649 +2010_003651 +2010_003656 +2010_003665 +2010_003670 +2010_003671 +2010_003672 +2010_003674 +2010_003677 +2010_003680 +2010_003686 +2010_003689 +2010_003690 +2010_003696 +2010_003703 +2010_003714 +2010_003717 +2010_003719 +2010_003721 +2010_003725 +2010_003734 +2010_003736 +2010_003737 +2010_003743 +2010_003747 +2010_003752 +2010_003754 +2010_003770 +2010_003773 +2010_003784 +2010_003788 +2010_003789 +2010_003791 +2010_003798 +2010_003799 +2010_003804 +2010_003815 +2010_003816 +2010_003818 +2010_003821 +2010_003822 +2010_003825 +2010_003837 +2010_003844 +2010_003845 +2010_003856 +2010_003860 +2010_003864 +2010_003865 +2010_003871 +2010_003874 +2010_003875 +2010_003877 +2010_003884 +2010_003887 +2010_003891 +2010_003892 +2010_003893 +2010_003894 +2010_003897 +2010_003899 +2010_003900 +2010_003906 +2010_003910 +2010_003911 +2010_003914 +2010_003925 +2010_003929 +2010_003931 +2010_003937 +2010_003938 +2010_003945 +2010_003949 +2010_003950 +2010_003954 +2010_003957 +2010_003958 +2010_003974 +2010_003982 +2010_003987 +2010_003994 +2010_003995 +2010_003996 +2010_004002 +2010_004005 +2010_004007 +2010_004008 +2010_004009 +2010_004011 +2010_004014 +2010_004017 +2010_004025 +2010_004028 +2010_004029 +2010_004030 +2010_004033 +2010_004043 +2010_004045 +2010_004048 +2010_004052 +2010_004053 +2010_004059 +2010_004060 +2010_004061 +2010_004062 +2010_004065 +2010_004066 +2010_004069 +2010_004071 +2010_004072 +2010_004074 +2010_004075 +2010_004081 +2010_004084 +2010_004089 +2010_004092 +2010_004108 +2010_004109 +2010_004111 +2010_004116 +2010_004118 +2010_004119 +2010_004121 +2010_004123 +2010_004130 +2010_004133 +2010_004138 +2010_004144 +2010_004148 +2010_004154 +2010_004160 +2010_004162 +2010_004163 +2010_004168 +2010_004171 +2010_004172 +2010_004175 +2010_004180 +2010_004186 +2010_004191 +2010_004192 +2010_004197 +2010_004198 +2010_004204 +2010_004210 +2010_004216 +2010_004222 +2010_004223 +2010_004231 +2010_004239 +2010_004242 +2010_004244 +2010_004247 +2010_004248 +2010_004249 +2010_004252 +2010_004256 +2010_004258 +2010_004259 +2010_004264 +2010_004271 +2010_004275 +2010_004276 +2010_004282 +2010_004283 +2010_004288 +2010_004289 +2010_004295 +2010_004296 +2010_004301 +2010_004306 +2010_004307 +2010_004311 +2010_004325 +2010_004327 +2010_004332 +2010_004333 +2010_004336 +2010_004344 +2010_004346 +2010_004349 +2010_004357 +2010_004358 +2010_004360 +2010_004361 +2010_004363 +2010_004365 +2010_004366 +2010_004367 +2010_004368 +2010_004370 +2010_004371 +2010_004373 +2010_004385 +2010_004402 +2010_004412 +2010_004423 +2010_004429 +2010_004436 +2010_004441 +2010_004445 +2010_004448 +2010_004450 +2010_004451 +2010_004459 +2010_004466 +2010_004467 
+2010_004476 +2010_004477 +2010_004478 +2010_004481 +2010_004491 +2010_004492 +2010_004493 +2010_004499 +2010_004501 +2010_004511 +2010_004514 +2010_004517 +2010_004518 +2010_004521 +2010_004523 +2010_004540 +2010_004546 +2010_004558 +2010_004560 +2010_004561 +2010_004569 +2010_004573 +2010_004575 +2010_004576 +2010_004577 +2010_004581 +2010_004591 +2010_004592 +2010_004594 +2010_004598 +2010_004600 +2010_004601 +2010_004604 +2010_004609 +2010_004616 +2010_004620 +2010_004621 +2010_004625 +2010_004631 +2010_004638 +2010_004646 +2010_004655 +2010_004656 +2010_004657 +2010_004660 +2010_004665 +2010_004666 +2010_004669 +2010_004676 +2010_004680 +2010_004683 +2010_004690 +2010_004694 +2010_004696 +2010_004698 +2010_004703 +2010_004704 +2010_004708 +2010_004710 +2010_004712 +2010_004717 +2010_004721 +2010_004726 +2010_004728 +2010_004729 +2010_004730 +2010_004738 +2010_004741 +2010_004749 +2010_004751 +2010_004760 +2010_004765 +2010_004766 +2010_004770 +2010_004773 +2010_004777 +2010_004782 +2010_004791 +2010_004793 +2010_004797 +2010_004805 +2010_004806 +2010_004807 +2010_004808 +2010_004812 +2010_004816 +2010_004822 +2010_004824 +2010_004826 +2010_004831 +2010_004832 +2010_004838 +2010_004841 +2010_004844 +2010_004847 +2010_004848 +2010_004852 +2010_004855 +2010_004871 +2010_004874 +2010_004878 +2010_004879 +2010_004888 +2010_004890 +2010_004896 +2010_004900 +2010_004910 +2010_004913 +2010_004916 +2010_004918 +2010_004922 +2010_004928 +2010_004933 +2010_004937 +2010_004938 +2010_004942 +2010_004943 +2010_004944 +2010_004945 +2010_004948 +2010_004950 +2010_004953 +2010_004959 +2010_004960 +2010_004962 +2010_004963 +2010_004966 +2010_004968 +2010_004970 +2010_004971 +2010_004973 +2010_004974 +2010_004983 +2010_004987 +2010_004991 +2010_004995 +2010_004997 +2010_005002 +2010_005011 +2010_005016 +2010_005017 +2010_005018 +2010_005019 +2010_005022 +2010_005028 +2010_005033 +2010_005041 +2010_005054 +2010_005055 +2010_005060 +2010_005062 +2010_005064 +2010_005068 +2010_005071 +2010_005072 +2010_005080 +2010_005090 +2010_005093 +2010_005094 +2010_005098 +2010_005099 +2010_005100 +2010_005101 +2010_005106 +2010_005110 +2010_005111 +2010_005119 +2010_005127 +2010_005128 +2010_005129 +2010_005133 +2010_005134 +2010_005147 +2010_005149 +2010_005155 +2010_005161 +2010_005170 +2010_005182 +2010_005183 +2010_005190 +2010_005193 +2010_005198 +2010_005199 +2010_005201 +2010_005202 +2010_005211 +2010_005213 +2010_005216 +2010_005217 +2010_005223 +2010_005229 +2010_005232 +2010_005236 +2010_005238 +2010_005241 +2010_005253 +2010_005257 +2010_005258 +2010_005260 +2010_005261 +2010_005266 +2010_005270 +2010_005273 +2010_005274 +2010_005275 +2010_005276 +2010_005277 +2010_005279 +2010_005297 +2010_005299 +2010_005301 +2010_005303 +2010_005306 +2010_005308 +2010_005309 +2010_005310 +2010_005312 +2010_005317 +2010_005318 +2010_005320 +2010_005349 +2010_005350 +2010_005352 +2010_005359 +2010_005361 +2010_005364 +2010_005365 +2010_005371 +2010_005376 +2010_005377 +2010_005384 +2010_005385 +2010_005386 +2010_005388 +2010_005389 +2010_005391 +2010_005393 +2010_005402 +2010_005403 +2010_005408 +2010_005409 +2010_005415 +2010_005417 +2010_005419 +2010_005426 +2010_005429 +2010_005434 +2010_005437 +2010_005442 +2010_005450 +2010_005457 +2010_005458 +2010_005462 +2010_005466 +2010_005468 +2010_005471 +2010_005475 +2010_005489 +2010_005492 +2010_005494 +2010_005497 +2010_005498 +2010_005500 +2010_005505 +2010_005506 +2010_005511 +2010_005512 +2010_005513 +2010_005518 +2010_005519 +2010_005522 +2010_005535 +2010_005536 
+2010_005540 +2010_005546 +2010_005557 +2010_005559 +2010_005561 +2010_005565 +2010_005570 +2010_005571 +2010_005573 +2010_005578 +2010_005584 +2010_005585 +2010_005588 +2010_005591 +2010_005593 +2010_005595 +2010_005596 +2010_005597 +2010_005601 +2010_005603 +2010_005604 +2010_005608 +2010_005614 +2010_005615 +2010_005616 +2010_005619 +2010_005627 +2010_005628 +2010_005629 +2010_005640 +2010_005643 +2010_005646 +2010_005652 +2010_005663 +2010_005665 +2010_005668 +2010_005669 +2010_005670 +2010_005672 +2010_005678 +2010_005683 +2010_005684 +2010_005696 +2010_005700 +2010_005715 +2010_005716 +2010_005721 +2010_005723 +2010_005725 +2010_005732 +2010_005734 +2010_005735 +2010_005736 +2010_005740 +2010_005744 +2010_005746 +2010_005748 +2010_005750 +2010_005753 +2010_005755 +2010_005758 +2010_005770 +2010_005775 +2010_005776 +2010_005782 +2010_005785 +2010_005791 +2010_005794 +2010_005796 +2010_005800 +2010_005805 +2010_005807 +2010_005810 +2010_005816 +2010_005820 +2010_005821 +2010_005823 +2010_005825 +2010_005826 +2010_005830 +2010_005835 +2010_005836 +2010_005840 +2010_005841 +2010_005845 +2010_005847 +2010_005854 +2010_005855 +2010_005865 +2010_005867 +2010_005874 +2010_005875 +2010_005876 +2010_005891 +2010_005892 +2010_005898 +2010_005904 +2010_005906 +2010_005909 +2010_005919 +2010_005921 +2010_005927 +2010_005928 +2010_005929 +2010_005930 +2010_005932 +2010_005935 +2010_005942 +2010_005948 +2010_005949 +2010_005951 +2010_005952 +2010_005954 +2010_005957 +2010_005958 +2010_005959 +2010_005960 +2010_005967 +2010_005968 +2010_005972 +2010_005974 +2010_005975 +2010_005978 +2010_005982 +2010_005984 +2010_005985 +2010_005986 +2010_005987 +2010_005995 +2010_005996 +2010_006009 +2010_006012 +2010_006015 +2010_006023 +2010_006028 +2010_006040 +2010_006042 +2010_006050 +2010_006063 +2010_006066 +2010_006067 +2010_006073 +2010_006078 +2010_006079 +2011_000003 +2011_000006 +2011_000012 +2011_000017 +2011_000022 +2011_000025 +2011_000027 +2011_000028 +2011_000030 +2011_000041 +2011_000044 +2011_000048 +2011_000052 +2011_000053 +2011_000058 +2011_000068 +2011_000069 +2011_000072 +2011_000095 +2011_000105 +2011_000108 +2011_000116 +2011_000122 +2011_000137 +2011_000138 +2011_000145 +2011_000149 +2011_000152 +2011_000176 +2011_000181 +2011_000182 +2011_000192 +2011_000196 +2011_000197 +2011_000208 +2011_000216 +2011_000219 +2011_000220 +2011_000221 +2011_000222 +2011_000224 +2011_000228 +2011_000233 +2011_000241 +2011_000243 +2011_000249 +2011_000250 +2011_000252 +2011_000258 +2011_000267 +2011_000268 +2011_000269 +2011_000277 +2011_000278 +2011_000282 +2011_000285 +2011_000286 +2011_000290 +2011_000293 +2011_000297 +2011_000305 +2011_000317 +2011_000324 +2011_000329 +2011_000342 +2011_000343 +2011_000345 +2011_000347 +2011_000359 +2011_000361 +2011_000362 +2011_000370 +2011_000375 +2011_000376 +2011_000379 +2011_000382 +2011_000383 +2011_000385 +2011_000388 +2011_000392 +2011_000397 +2011_000398 +2011_000399 +2011_000400 +2011_000413 +2011_000416 +2011_000420 +2011_000428 +2011_000430 +2011_000434 +2011_000442 +2011_000444 +2011_000449 +2011_000450 +2011_000453 +2011_000454 +2011_000457 +2011_000461 +2011_000465 +2011_000468 +2011_000469 +2011_000472 +2011_000475 +2011_000485 +2011_000491 +2011_000492 +2011_000494 +2011_000496 +2011_000499 +2011_000502 +2011_000505 +2011_000509 +2011_000513 +2011_000520 +2011_000531 +2011_000534 +2011_000538 +2011_000542 +2011_000550 +2011_000551 +2011_000556 +2011_000558 +2011_000560 +2011_000565 +2011_000567 +2011_000572 +2011_000573 +2011_000577 +2011_000578 
+2011_000579 +2011_000586 +2011_000589 +2011_000594 +2011_000596 +2011_000621 +2011_000628 +2011_000629 +2011_000631 +2011_000637 +2011_000641 +2011_000642 +2011_000646 +2011_000651 +2011_000652 +2011_000655 +2011_000657 +2011_000673 +2011_000675 +2011_000682 +2011_000684 +2011_000689 +2011_000692 +2011_000698 +2011_000701 +2011_000703 +2011_000704 +2011_000711 +2011_000713 +2011_000725 +2011_000730 +2011_000731 +2011_000748 +2011_000755 +2011_000757 +2011_000758 +2011_000759 +2011_000763 +2011_000768 +2011_000769 +2011_000771 +2011_000788 +2011_000790 +2011_000791 +2011_000793 +2011_000800 +2011_000804 +2011_000806 +2011_000815 +2011_000819 +2011_000820 +2011_000823 +2011_000827 +2011_000828 +2011_000829 +2011_000831 +2011_000834 +2011_000837 +2011_000839 +2011_000840 +2011_000845 +2011_000847 +2011_000848 +2011_000855 +2011_000858 +2011_000859 +2011_000875 +2011_000882 +2011_000885 +2011_000893 +2011_000895 +2011_000898 +2011_000899 +2011_000920 +2011_000922 +2011_000934 +2011_000940 +2011_000944 +2011_000947 +2011_000954 +2011_000971 +2011_000973 +2011_000975 +2011_000979 +2011_000981 +2011_000982 +2011_000983 +2011_000987 +2011_000991 +2011_000996 +2011_000997 +2011_000999 +2011_001001 +2011_001004 +2011_001009 +2011_001010 +2011_001011 +2011_001015 +2011_001016 +2011_001022 +2011_001023 +2011_001027 +2011_001028 +2011_001030 +2011_001031 +2011_001033 +2011_001034 +2011_001052 +2011_001055 +2011_001062 +2011_001066 +2011_001073 +2011_001079 +2011_001080 +2011_001091 +2011_001093 +2011_001097 +2011_001107 +2011_001117 +2011_001123 +2011_001127 +2011_001133 +2011_001134 +2011_001135 +2011_001136 +2011_001139 +2011_001144 +2011_001150 +2011_001153 +2011_001163 +2011_001166 +2011_001168 +2011_001169 +2011_001173 +2011_001175 +2011_001176 +2011_001188 +2011_001189 +2011_001192 +2011_001193 +2011_001198 +2011_001208 +2011_001211 +2011_001215 +2011_001216 +2011_001220 +2011_001227 +2011_001238 +2011_001240 +2011_001246 +2011_001253 +2011_001254 +2011_001255 +2011_001257 +2011_001259 +2011_001270 +2011_001272 +2011_001277 +2011_001285 +2011_001286 +2011_001302 +2011_001310 +2011_001318 +2011_001320 +2011_001323 +2011_001333 +2011_001336 +2011_001344 +2011_001354 +2011_001357 +2011_001369 +2011_001373 +2011_001381 +2011_001382 +2011_001384 +2011_001394 +2011_001400 +2011_001402 +2011_001411 +2011_001412 +2011_001414 +2011_001422 +2011_001424 +2011_001432 +2011_001449 +2011_001451 +2011_001455 +2011_001456 +2011_001463 +2011_001464 +2011_001466 +2011_001475 +2011_001476 +2011_001479 +2011_001480 +2011_001498 +2011_001503 +2011_001505 +2011_001510 +2011_001514 +2011_001519 +2011_001526 +2011_001532 +2011_001536 +2011_001537 +2011_001538 +2011_001542 +2011_001547 +2011_001549 +2011_001557 +2011_001560 +2011_001566 +2011_001571 +2011_001572 +2011_001582 +2011_001586 +2011_001599 +2011_001600 +2011_001602 +2011_001605 +2011_001606 +2011_001611 +2011_001616 +2011_001621 +2011_001622 +2011_001625 +2011_001629 +2011_001632 +2011_001643 +2011_001647 +2011_001649 +2011_001650 +2011_001652 +2011_001653 +2011_001656 +2011_001662 +2011_001663 +2011_001666 +2011_001671 +2011_001673 +2011_001679 +2011_001689 +2011_001694 +2011_001695 +2011_001698 +2011_001700 +2011_001710 +2011_001715 +2011_001716 +2011_001727 +2011_001730 +2011_001732 +2011_001733 +2011_001739 +2011_001740 +2011_001753 +2011_001754 +2011_001755 +2011_001764 +2011_001765 +2011_001766 +2011_001769 +2011_001776 +2011_001779 +2011_001789 +2011_001790 +2011_001791 +2011_001796 +2011_001799 +2011_001805 +2011_001810 +2011_001811 +2011_001826 
+2011_001833 +2011_001840 +2011_001855 +2011_001866 +2011_001871 +2011_001872 +2011_001875 +2011_001884 +2011_001885 +2011_001886 +2011_001889 +2011_001891 +2011_001893 +2011_001895 +2011_001896 +2011_001901 +2011_001902 +2011_001904 +2011_001906 +2011_001920 +2011_001922 +2011_001924 +2011_001926 +2011_001928 +2011_001929 +2011_001930 +2011_001937 +2011_001938 +2011_001944 +2011_001949 +2011_001950 +2011_001952 +2011_001956 +2011_001959 +2011_001961 +2011_001964 +2011_001967 +2011_001971 +2011_001972 +2011_001974 +2011_001977 +2011_001987 +2011_001991 +2011_002005 +2011_002006 +2011_002012 +2011_002022 +2011_002027 +2011_002031 +2011_002034 +2011_002039 +2011_002046 +2011_002049 +2011_002050 +2011_002053 +2011_002055 +2011_002062 +2011_002063 +2011_002073 +2011_002085 +2011_002096 +2011_002097 +2011_002106 +2011_002107 +2011_002111 +2011_002113 +2011_002114 +2011_002119 +2011_002131 +2011_002134 +2011_002135 +2011_002142 +2011_002143 +2011_002144 +2011_002147 +2011_002148 +2011_002149 +2011_002167 +2011_002177 +2011_002179 +2011_002186 +2011_002189 +2011_002211 +2011_002218 +2011_002222 +2011_002224 +2011_002227 +2011_002228 +2011_002236 +2011_002237 +2011_002239 +2011_002245 +2011_002246 +2011_002251 +2011_002252 +2011_002253 +2011_002265 +2011_002268 +2011_002273 +2011_002278 +2011_002281 +2011_002284 +2011_002291 +2011_002300 +2011_002303 +2011_002318 +2011_002335 +2011_002341 +2011_002346 +2011_002347 +2011_002348 +2011_002350 +2011_002359 +2011_002381 +2011_002385 +2011_002387 +2011_002388 +2011_002389 +2011_002394 +2011_002397 +2011_002398 +2011_002402 +2011_002410 +2011_002413 +2011_002418 +2011_002419 +2011_002420 +2011_002421 +2011_002422 +2011_002433 +2011_002435 +2011_002436 +2011_002443 +2011_002447 +2011_002448 +2011_002455 +2011_002457 +2011_002458 +2011_002460 +2011_002461 +2011_002462 +2011_002464 +2011_002470 +2011_002474 +2011_002476 +2011_002484 +2011_002488 +2011_002492 +2011_002503 +2011_002504 +2011_002511 +2011_002514 +2011_002526 +2011_002528 +2011_002533 +2011_002543 +2011_002551 +2011_002552 +2011_002553 +2011_002554 +2011_002555 +2011_002559 +2011_002560 +2011_002561 +2011_002567 +2011_002568 +2011_002571 +2011_002584 +2011_002585 +2011_002590 +2011_002594 +2011_002598 +2011_002601 +2011_002606 +2011_002609 +2011_002614 +2011_002616 +2011_002618 +2011_002620 +2011_002636 +2011_002638 +2011_002649 +2011_002650 +2011_002652 +2011_002656 +2011_002657 +2011_002658 +2011_002661 +2011_002664 +2011_002673 +2011_002676 +2011_002677 +2011_002697 +2011_002706 +2011_002709 +2011_002715 +2011_002717 +2011_002719 +2011_002724 +2011_002726 +2011_002746 +2011_002748 +2011_002752 +2011_002756 +2011_002767 +2011_002770 +2011_002775 +2011_002776 +2011_002779 +2011_002780 +2011_002782 +2011_002790 +2011_002795 +2011_002798 +2011_002803 +2011_002808 +2011_002811 +2011_002814 +2011_002818 +2011_002821 +2011_002823 +2011_002826 +2011_002834 +2011_002842 +2011_002851 +2011_002852 +2011_002867 +2011_002872 +2011_002873 +2011_002881 +2011_002884 +2011_002889 +2011_002908 +2011_002911 +2011_002912 +2011_002913 +2011_002917 +2011_002920 +2011_002921 +2011_002924 +2011_002927 +2011_002930 +2011_002932 +2011_002935 +2011_002937 +2011_002938 +2011_002940 +2011_002942 +2011_002947 +2011_002949 +2011_002953 +2011_002956 +2011_002958 +2011_002965 +2011_002966 +2011_002969 +2011_002974 +2011_002979 +2011_002987 +2011_002988 +2011_003002 +2011_003005 +2011_003010 +2011_003016 +2011_003020 +2011_003025 +2011_003034 +2011_003038 +2011_003041 +2011_003044 +2011_003047 +2011_003048 +2011_003049 
+2011_003054 +2011_003057 +2011_003063 +2011_003065 +2011_003066 +2011_003073 +2011_003074 +2011_003078 +2011_003081 +2011_003091 +2011_003109 +2011_003121 +2011_003124 +2011_003132 +2011_003134 +2011_003138 +2011_003141 +2011_003148 +2011_003150 +2011_003151 +2011_003154 +2011_003158 +2011_003159 +2011_003162 +2011_003171 +2011_003177 +2011_003183 +2011_003184 +2011_003187 +2011_003188 +2011_003192 +2011_003194 +2011_003216 +2011_003223 +2011_003230 +2011_003236 +2011_003238 +2011_003246 +2011_003247 +2011_003253 +2011_003255 +2011_003259 +2011_003274 +2011_003276 diff --git a/ImageSets/Main/trainval.txt b/ImageSets/Main/trainval.txt new file mode 100644 index 0000000..2994c00 --- /dev/null +++ b/ImageSets/Main/trainval.txt @@ -0,0 +1,16551 @@ +000005 +000007 +000009 +000012 +000016 +000017 +000019 +000020 +000021 +000023 +000024 +000026 +000030 +000032 +000033 +000034 +000035 +000036 +000039 +000041 +000042 +000044 +000046 +000047 +000048 +000050 +000051 +000052 +000060 +000061 +000063 +000064 +000065 +000066 +000072 +000073 +000077 +000078 +000081 +000083 +000089 +000091 +000093 +000095 +000099 +000101 +000102 +000104 +000107 +000109 +000110 +000112 +000113 +000117 +000118 +000120 +000121 +000122 +000123 +000125 +000129 +000130 +000131 +000132 +000133 +000134 +000138 +000140 +000141 +000142 +000143 +000146 +000147 +000150 +000153 +000154 +000156 +000158 +000159 +000161 +000162 +000163 +000164 +000165 +000169 +000170 +000171 +000173 +000174 +000177 +000180 +000184 +000187 +000189 +000190 +000192 +000193 +000194 +000198 +000200 +000203 +000207 +000208 +000209 +000210 +000211 +000214 +000215 +000218 +000219 +000220 +000221 +000222 +000224 +000225 +000228 +000229 +000232 +000233 +000235 +000236 +000241 +000242 +000244 +000245 +000246 +000249 +000250 +000251 +000256 +000257 +000259 +000262 +000263 +000266 +000268 +000269 +000270 +000275 +000276 +000278 +000282 +000285 +000288 +000289 +000294 +000296 +000298 +000302 +000303 +000304 +000305 +000306 +000307 +000308 +000311 +000312 +000317 +000318 +000320 +000321 +000322 +000323 +000325 +000328 +000329 +000331 +000332 +000334 +000336 +000337 +000338 +000340 +000343 +000344 +000347 +000349 +000352 +000354 +000355 +000359 +000363 +000367 +000370 +000372 +000373 +000374 +000379 +000380 +000381 +000382 +000387 +000391 +000394 +000395 +000396 +000400 +000403 +000404 +000406 +000407 +000408 +000411 +000416 +000417 +000419 +000420 +000424 +000427 +000428 +000430 +000431 +000433 +000435 +000438 +000439 +000443 +000446 +000448 +000450 +000454 +000459 +000460 +000461 +000462 +000463 +000464 +000468 +000469 +000470 +000474 +000476 +000477 +000480 +000482 +000483 +000484 +000486 +000489 +000491 +000492 +000494 +000496 +000498 +000499 +000500 +000501 +000503 +000508 +000509 +000513 +000514 +000515 +000516 +000518 +000519 +000520 +000522 +000523 +000524 +000525 +000526 +000528 +000530 +000531 +000535 +000537 +000540 +000541 +000543 +000544 +000545 +000549 +000550 +000552 +000554 +000555 +000559 +000563 +000564 +000565 +000577 +000579 +000581 +000582 +000583 +000588 +000589 +000590 +000591 +000592 +000597 +000598 +000599 +000601 +000605 +000608 +000609 +000610 +000612 +000613 +000619 +000620 +000622 +000625 +000626 +000628 +000632 +000633 +000635 +000637 +000645 +000647 +000648 +000653 +000654 +000656 +000657 +000660 +000661 +000663 +000667 +000671 +000672 +000675 +000676 +000677 +000680 +000682 +000684 +000685 +000686 +000688 +000689 +000690 +000694 +000695 +000699 +000700 +000702 +000705 +000707 +000709 +000710 +000711 +000712 +000713 +000714 +000717 +000720 
+000726 +000728 +000729 +000730 +000731 +000733 +000738 +000739 +000740 +000742 +000746 +000748 +000750 +000752 +000753 +000754 +000755 +000756 +000760 +000761 +000763 +000764 +000767 +000768 +000770 +000771 +000772 +000774 +000776 +000777 +000780 +000782 +000786 +000787 +000791 +000793 +000794 +000796 +000797 +000799 +000800 +000802 +000804 +000805 +000806 +000808 +000810 +000812 +000814 +000815 +000816 +000818 +000820 +000822 +000823 +000826 +000827 +000828 +000829 +000830 +000831 +000832 +000834 +000842 +000843 +000845 +000847 +000848 +000849 +000850 +000851 +000854 +000855 +000857 +000859 +000860 +000862 +000863 +000865 +000867 +000868 +000871 +000872 +000874 +000876 +000878 +000879 +000880 +000882 +000885 +000887 +000888 +000889 +000892 +000895 +000896 +000898 +000899 +000900 +000902 +000903 +000904 +000906 +000908 +000911 +000912 +000915 +000917 +000918 +000919 +000920 +000921 +000923 +000926 +000929 +000931 +000934 +000935 +000936 +000937 +000943 +000946 +000947 +000948 +000949 +000950 +000951 +000954 +000958 +000962 +000964 +000965 +000966 +000967 +000971 +000972 +000973 +000977 +000980 +000982 +000987 +000989 +000991 +000993 +000996 +000997 +000999 +001001 +001002 +001004 +001008 +001009 +001010 +001011 +001012 +001014 +001015 +001017 +001018 +001024 +001027 +001028 +001036 +001041 +001042 +001043 +001045 +001050 +001052 +001053 +001056 +001057 +001060 +001061 +001062 +001064 +001066 +001068 +001069 +001071 +001072 +001073 +001074 +001077 +001078 +001079 +001082 +001083 +001084 +001091 +001092 +001093 +001097 +001101 +001102 +001104 +001106 +001107 +001109 +001110 +001112 +001113 +001119 +001121 +001124 +001125 +001127 +001129 +001130 +001136 +001137 +001140 +001142 +001143 +001144 +001145 +001147 +001148 +001149 +001151 +001152 +001154 +001156 +001158 +001160 +001161 +001164 +001166 +001168 +001170 +001171 +001172 +001174 +001175 +001176 +001182 +001184 +001185 +001186 +001187 +001191 +001192 +001194 +001199 +001200 +001201 +001203 +001204 +001205 +001206 +001207 +001209 +001211 +001212 +001214 +001215 +001221 +001224 +001225 +001226 +001229 +001230 +001231 +001233 +001234 +001236 +001237 +001239 +001240 +001241 +001247 +001248 +001250 +001254 +001258 +001259 +001260 +001263 +001265 +001266 +001268 +001269 +001270 +001272 +001273 +001274 +001277 +001279 +001281 +001284 +001286 +001287 +001288 +001289 +001290 +001292 +001293 +001294 +001298 +001299 +001304 +001309 +001310 +001311 +001312 +001314 +001315 +001316 +001323 +001324 +001325 +001326 +001327 +001330 +001332 +001333 +001334 +001337 +001341 +001343 +001345 +001346 +001348 +001350 +001352 +001360 +001361 +001362 +001364 +001365 +001371 +001375 +001378 +001383 +001384 +001385 +001386 +001387 +001388 +001390 +001393 +001395 +001397 +001400 +001402 +001404 +001405 +001406 +001408 +001409 +001413 +001414 +001418 +001420 +001421 +001426 +001427 +001430 +001432 +001434 +001436 +001439 +001441 +001442 +001443 +001444 +001445 +001450 +001451 +001453 +001455 +001457 +001460 +001463 +001464 +001465 +001466 +001467 +001468 +001470 +001472 +001475 +001479 +001480 +001481 +001483 +001484 +001485 +001486 +001488 +001490 +001492 +001493 +001494 +001497 +001498 +001499 +001501 +001504 +001509 +001510 +001512 +001514 +001515 +001517 +001521 +001522 +001523 +001524 +001526 +001528 +001529 +001531 +001532 +001536 +001537 +001539 +001541 +001543 +001544 +001545 +001548 +001553 +001554 +001555 +001556 +001557 +001559 +001561 +001563 +001565 +001571 +001576 +001577 +001579 +001580 +001582 +001586 +001588 +001590 +001593 +001594 +001595 +001597 
+001598 +001603 +001604 +001607 +001608 +001610 +001611 +001612 +001614 +001617 +001618 +001622 +001627 +001628 +001630 +001632 +001633 +001636 +001638 +001640 +001642 +001643 +001647 +001649 +001650 +001651 +001653 +001654 +001661 +001662 +001669 +001673 +001675 +001676 +001677 +001678 +001680 +001682 +001683 +001684 +001685 +001686 +001688 +001689 +001690 +001691 +001693 +001699 +001707 +001708 +001711 +001713 +001714 +001717 +001718 +001721 +001723 +001724 +001725 +001726 +001727 +001729 +001730 +001732 +001733 +001734 +001738 +001739 +001741 +001746 +001747 +001749 +001750 +001752 +001754 +001755 +001756 +001758 +001759 +001761 +001765 +001766 +001768 +001771 +001772 +001775 +001777 +001778 +001780 +001782 +001784 +001785 +001787 +001789 +001793 +001795 +001797 +001799 +001800 +001801 +001806 +001807 +001809 +001810 +001816 +001818 +001821 +001825 +001827 +001828 +001830 +001832 +001833 +001834 +001836 +001837 +001840 +001841 +001842 +001843 +001845 +001847 +001849 +001853 +001854 +001855 +001858 +001860 +001861 +001862 +001864 +001870 +001872 +001875 +001877 +001878 +001881 +001882 +001887 +001888 +001892 +001894 +001896 +001898 +001899 +001901 +001902 +001903 +001904 +001906 +001907 +001911 +001915 +001918 +001920 +001922 +001927 +001928 +001930 +001931 +001932 +001933 +001934 +001936 +001937 +001938 +001940 +001941 +001944 +001945 +001948 +001950 +001952 +001954 +001958 +001960 +001962 +001963 +001964 +001970 +001971 +001972 +001976 +001977 +001978 +001980 +001981 +001982 +001985 +001989 +001995 +001999 +002000 +002001 +002002 +002004 +002006 +002011 +002012 +002015 +002019 +002020 +002021 +002022 +002023 +002024 +002025 +002027 +002030 +002034 +002036 +002037 +002039 +002042 +002043 +002045 +002047 +002049 +002051 +002054 +002055 +002056 +002058 +002061 +002063 +002064 +002067 +002068 +002069 +002070 +002082 +002083 +002086 +002088 +002090 +002091 +002094 +002095 +002096 +002098 +002099 +002101 +002102 +002104 +002108 +002109 +002112 +002114 +002116 +002117 +002120 +002124 +002125 +002126 +002129 +002132 +002134 +002135 +002136 +002139 +002140 +002142 +002145 +002146 +002151 +002152 +002153 +002155 +002156 +002158 +002163 +002165 +002166 +002169 +002170 +002171 +002172 +002174 +002176 +002178 +002179 +002180 +002181 +002182 +002183 +002184 +002186 +002187 +002190 +002191 +002192 +002193 +002194 +002196 +002197 +002199 +002201 +002202 +002208 +002209 +002212 +002213 +002214 +002215 +002218 +002219 +002220 +002221 +002224 +002226 +002228 +002233 +002234 +002237 +002238 +002241 +002244 +002247 +002248 +002249 +002251 +002253 +002255 +002256 +002257 +002259 +002260 +002261 +002263 +002265 +002266 +002267 +002268 +002270 +002272 +002273 +002276 +002277 +002278 +002279 +002280 +002281 +002284 +002285 +002287 +002288 +002290 +002291 +002293 +002300 +002302 +002305 +002306 +002307 +002308 +002310 +002311 +002315 +002318 +002320 +002321 +002323 +002324 +002328 +002329 +002330 +002332 +002333 +002334 +002335 +002337 +002340 +002342 +002343 +002345 +002347 +002348 +002350 +002352 +002354 +002355 +002359 +002361 +002362 +002364 +002366 +002367 +002368 +002369 +002371 +002372 +002373 +002374 +002375 +002376 +002377 +002378 +002382 +002384 +002385 +002387 +002391 +002392 +002393 +002401 +002403 +002404 +002405 +002407 +002410 +002411 +002413 +002415 +002417 +002419 +002420 +002423 +002425 +002427 +002433 +002435 +002436 +002437 +002439 +002441 +002442 +002443 +002444 +002445 +002448 +002450 +002452 +002454 +002456 +002458 +002459 +002460 +002461 +002462 +002465 +002466 +002468 +002470 +002471 
+002472 +002476 +002477 +002478 +002479 +002480 +002481 +002483 +002490 +002491 +002492 +002493 +002494 +002496 +002497 +002500 +002501 +002502 +002504 +002505 +002508 +002512 +002513 +002514 +002518 +002519 +002520 +002523 +002524 +002525 +002529 +002533 +002534 +002537 +002539 +002540 +002542 +002544 +002545 +002546 +002547 +002549 +002554 +002555 +002558 +002559 +002561 +002563 +002564 +002565 +002566 +002567 +002569 +002571 +002572 +002578 +002579 +002584 +002585 +002586 +002589 +002590 +002593 +002594 +002595 +002598 +002599 +002600 +002603 +002605 +002606 +002609 +002611 +002613 +002615 +002618 +002621 +002625 +002627 +002632 +002633 +002634 +002635 +002636 +002637 +002641 +002643 +002645 +002646 +002647 +002648 +002649 +002653 +002657 +002658 +002659 +002662 +002664 +002666 +002667 +002668 +002669 +002670 +002675 +002677 +002678 +002680 +002682 +002683 +002684 +002689 +002690 +002691 +002693 +002695 +002696 +002697 +002699 +002702 +002704 +002706 +002709 +002710 +002713 +002714 +002715 +002717 +002718 +002721 +002722 +002723 +002727 +002730 +002732 +002734 +002735 +002737 +002738 +002741 +002744 +002745 +002747 +002749 +002751 +002755 +002757 +002759 +002760 +002762 +002763 +002765 +002766 +002767 +002772 +002774 +002775 +002776 +002778 +002779 +002782 +002783 +002784 +002785 +002786 +002791 +002794 +002795 +002796 +002798 +002800 +002801 +002803 +002804 +002807 +002810 +002812 +002815 +002816 +002817 +002820 +002826 +002827 +002833 +002834 +002835 +002836 +002838 +002841 +002842 +002844 +002845 +002847 +002848 +002854 +002855 +002858 +002859 +002864 +002866 +002867 +002868 +002869 +002870 +002873 +002875 +002879 +002880 +002881 +002884 +002886 +002889 +002891 +002893 +002896 +002899 +002901 +002906 +002910 +002912 +002913 +002914 +002915 +002916 +002917 +002919 +002924 +002931 +002932 +002933 +002934 +002935 +002937 +002938 +002939 +002940 +002941 +002942 +002943 +002944 +002946 +002947 +002952 +002953 +002954 +002956 +002957 +002958 +002960 +002962 +002963 +002965 +002966 +002967 +002969 +002975 +002976 +002977 +002978 +002984 +002986 +002987 +002988 +002989 +002990 +002992 +002994 +002995 +003000 +003002 +003003 +003004 +003005 +003007 +003008 +003009 +003011 +003013 +003015 +003017 +003021 +003023 +003024 +003027 +003028 +003031 +003032 +003034 +003038 +003039 +003042 +003044 +003045 +003047 +003051 +003053 +003054 +003056 +003057 +003058 +003061 +003063 +003064 +003065 +003066 +003072 +003074 +003077 +003078 +003082 +003083 +003085 +003086 +003088 +003089 +003090 +003092 +003093 +003094 +003098 +003100 +003102 +003103 +003105 +003106 +003107 +003108 +003110 +003112 +003116 +003117 +003118 +003120 +003121 +003122 +003124 +003126 +003127 +003129 +003133 +003134 +003135 +003137 +003138 +003140 +003142 +003145 +003146 +003147 +003149 +003150 +003154 +003155 +003157 +003159 +003161 +003162 +003163 +003164 +003165 +003169 +003170 +003175 +003176 +003177 +003178 +003181 +003183 +003184 +003185 +003186 +003188 +003189 +003194 +003195 +003199 +003200 +003202 +003204 +003205 +003207 +003210 +003211 +003213 +003214 +003216 +003218 +003219 +003223 +003228 +003229 +003231 +003233 +003236 +003239 +003240 +003242 +003243 +003244 +003247 +003250 +003253 +003254 +003255 +003256 +003258 +003259 +003260 +003261 +003262 +003269 +003270 +003271 +003272 +003273 +003274 +003279 +003280 +003282 +003284 +003285 +003290 +003292 +003293 +003294 +003296 +003299 +003300 +003301 +003303 +003307 +003308 +003311 +003313 +003316 +003320 +003325 +003327 +003330 +003331 +003335 +003336 +003337 +003338 +003339 
+003343 +003344 +003349 +003350 +003351 +003354 +003355 +003356 +003359 +003360 +003362 +003363 +003365 +003367 +003369 +003370 +003373 +003374 +003376 +003377 +003379 +003380 +003382 +003386 +003390 +003391 +003392 +003395 +003396 +003397 +003398 +003401 +003403 +003404 +003406 +003407 +003408 +003410 +003412 +003413 +003415 +003416 +003417 +003419 +003420 +003421 +003422 +003424 +003425 +003429 +003430 +003433 +003435 +003436 +003439 +003441 +003443 +003444 +003449 +003450 +003451 +003452 +003453 +003455 +003458 +003461 +003462 +003464 +003465 +003466 +003468 +003469 +003470 +003477 +003484 +003487 +003489 +003491 +003492 +003493 +003496 +003497 +003499 +003500 +003506 +003508 +003509 +003510 +003511 +003516 +003518 +003519 +003521 +003522 +003524 +003525 +003528 +003529 +003530 +003536 +003537 +003539 +003546 +003548 +003549 +003550 +003551 +003554 +003555 +003556 +003564 +003565 +003566 +003567 +003575 +003576 +003577 +003580 +003585 +003586 +003587 +003588 +003589 +003593 +003594 +003596 +003597 +003599 +003603 +003604 +003605 +003606 +003608 +003609 +003611 +003614 +003618 +003620 +003621 +003622 +003623 +003625 +003627 +003628 +003629 +003632 +003634 +003635 +003636 +003638 +003639 +003640 +003642 +003644 +003645 +003646 +003648 +003651 +003654 +003655 +003656 +003657 +003658 +003660 +003662 +003663 +003664 +003667 +003669 +003671 +003673 +003674 +003675 +003678 +003679 +003681 +003684 +003685 +003688 +003690 +003691 +003694 +003695 +003696 +003698 +003699 +003700 +003703 +003704 +003705 +003706 +003708 +003709 +003711 +003713 +003714 +003717 +003721 +003722 +003727 +003729 +003732 +003735 +003740 +003743 +003748 +003749 +003750 +003751 +003752 +003753 +003754 +003758 +003759 +003760 +003763 +003767 +003772 +003773 +003774 +003779 +003780 +003781 +003783 +003784 +003786 +003788 +003790 +003791 +003792 +003793 +003796 +003797 +003798 +003803 +003806 +003807 +003808 +003809 +003811 +003814 +003817 +003818 +003820 +003821 +003824 +003826 +003827 +003828 +003830 +003834 +003835 +003837 +003838 +003844 +003845 +003846 +003847 +003848 +003849 +003855 +003856 +003857 +003859 +003860 +003861 +003863 +003865 +003866 +003868 +003869 +003871 +003872 +003874 +003876 +003877 +003879 +003885 +003886 +003887 +003889 +003890 +003891 +003895 +003898 +003899 +003905 +003907 +003911 +003912 +003913 +003915 +003918 +003919 +003921 +003923 +003924 +003926 +003932 +003935 +003936 +003937 +003939 +003941 +003945 +003946 +003947 +003948 +003949 +003953 +003954 +003956 +003957 +003960 +003961 +003963 +003965 +003966 +003969 +003970 +003971 +003973 +003974 +003979 +003983 +003984 +003986 +003987 +003988 +003990 +003991 +003992 +003993 +003994 +003996 +003997 +003998 +004003 +004005 +004008 +004009 +004010 +004011 +004012 +004013 +004014 +004015 +004016 +004017 +004019 +004020 +004023 +004025 +004028 +004031 +004033 +004034 +004035 +004037 +004039 +004046 +004047 +004051 +004052 +004057 +004058 +004060 +004066 +004067 +004069 +004073 +004075 +004076 +004077 +004082 +004085 +004087 +004089 +004091 +004092 +004093 +004095 +004100 +004102 +004105 +004106 +004108 +004110 +004111 +004113 +004117 +004120 +004121 +004122 +004129 +004131 +004133 +004135 +004136 +004137 +004138 +004140 +004141 +004142 +004143 +004145 +004146 +004148 +004149 +004150 +004152 +004158 +004163 +004164 +004168 +004169 +004170 +004171 +004174 +004178 +004185 +004186 +004189 +004190 +004191 +004192 +004193 +004194 +004195 +004196 +004200 +004201 +004203 +004204 +004205 +004209 +004212 +004215 +004220 +004221 +004223 +004224 +004228 +004229 
+004230 +004231 +004232 +004237 +004239 +004241 +004242 +004244 +004246 +004247 +004253 +004255 +004256 +004257 +004258 +004259 +004263 +004264 +004265 +004269 +004270 +004271 +004272 +004273 +004274 +004275 +004279 +004280 +004281 +004283 +004284 +004286 +004287 +004291 +004292 +004293 +004295 +004296 +004298 +004300 +004303 +004304 +004307 +004310 +004312 +004315 +004318 +004321 +004322 +004323 +004325 +004326 +004327 +004329 +004331 +004333 +004338 +004339 +004341 +004345 +004346 +004347 +004349 +004351 +004352 +004354 +004356 +004359 +004360 +004361 +004364 +004365 +004367 +004368 +004369 +004370 +004371 +004372 +004376 +004379 +004380 +004384 +004386 +004387 +004389 +004390 +004391 +004392 +004396 +004397 +004404 +004405 +004409 +004411 +004421 +004423 +004424 +004429 +004430 +004432 +004433 +004434 +004436 +004437 +004438 +004439 +004441 +004446 +004450 +004452 +004455 +004457 +004459 +004463 +004464 +004466 +004468 +004470 +004471 +004474 +004479 +004481 +004484 +004487 +004488 +004490 +004493 +004494 +004495 +004496 +004498 +004499 +004500 +004502 +004507 +004508 +004509 +004510 +004512 +004514 +004517 +004518 +004519 +004520 +004524 +004526 +004527 +004528 +004530 +004532 +004535 +004537 +004539 +004540 +004542 +004544 +004548 +004549 +004551 +004552 +004553 +004555 +004558 +004562 +004563 +004565 +004566 +004570 +004571 +004574 +004576 +004579 +004581 +004584 +004585 +004587 +004588 +004591 +004592 +004595 +004597 +004600 +004601 +004604 +004605 +004606 +004607 +004609 +004611 +004612 +004618 +004622 +004623 +004625 +004626 +004627 +004628 +004630 +004631 +004632 +004634 +004636 +004643 +004644 +004647 +004648 +004649 +004651 +004652 +004653 +004654 +004655 +004656 +004660 +004662 +004671 +004672 +004673 +004674 +004675 +004676 +004679 +004682 +004683 +004685 +004686 +004687 +004689 +004691 +004692 +004693 +004694 +004699 +004701 +004702 +004705 +004706 +004707 +004708 +004710 +004714 +004715 +004718 +004719 +004722 +004723 +004727 +004732 +004735 +004737 +004742 +004743 +004746 +004747 +004748 +004750 +004753 +004754 +004760 +004761 +004768 +004770 +004773 +004776 +004777 +004779 +004782 +004783 +004785 +004786 +004788 +004789 +004790 +004792 +004793 +004794 +004796 +004797 +004799 +004801 +004805 +004808 +004812 +004814 +004815 +004816 +004818 +004823 +004825 +004826 +004828 +004830 +004831 +004832 +004834 +004836 +004837 +004839 +004840 +004841 +004842 +004846 +004848 +004849 +004850 +004852 +004856 +004857 +004859 +004863 +004866 +004867 +004868 +004869 +004872 +004873 +004876 +004878 +004879 +004882 +004885 +004886 +004890 +004895 +004896 +004897 +004898 +004902 +004903 +004905 +004907 +004910 +004911 +004912 +004913 +004916 +004926 +004928 +004929 +004931 +004935 +004936 +004938 +004939 +004943 +004946 +004948 +004950 +004951 +004953 +004954 +004955 +004956 +004958 +004960 +004961 +004962 +004963 +004966 +004967 +004968 +004972 +004973 +004974 +004976 +004977 +004982 +004983 +004984 +004985 +004986 +004987 +004990 +004991 +004992 +004994 +004995 +004997 +004998 +004999 +005001 +005003 +005004 +005006 +005007 +005014 +005016 +005018 +005020 +005023 +005024 +005026 +005027 +005028 +005029 +005032 +005033 +005036 +005037 +005039 +005042 +005045 +005047 +005052 +005054 +005055 +005056 +005057 +005058 +005061 +005062 +005063 +005064 +005065 +005067 +005068 +005071 +005072 +005073 +005077 +005078 +005079 +005081 +005084 +005085 +005086 +005090 +005093 +005094 +005097 +005101 +005102 +005104 +005107 +005108 +005110 +005111 +005114 +005116 +005121 +005122 +005124 +005128 +005129 
+005130 +005131 +005134 +005135 +005136 +005138 +005143 +005144 +005145 +005146 +005150 +005153 +005156 +005159 +005160 +005161 +005168 +005169 +005171 +005173 +005175 +005176 +005177 +005179 +005181 +005183 +005185 +005186 +005189 +005190 +005191 +005195 +005199 +005202 +005203 +005208 +005209 +005210 +005212 +005214 +005215 +005217 +005219 +005220 +005222 +005223 +005224 +005229 +005230 +005231 +005236 +005239 +005242 +005244 +005245 +005246 +005248 +005253 +005254 +005257 +005258 +005259 +005260 +005262 +005263 +005264 +005267 +005268 +005269 +005270 +005273 +005274 +005278 +005281 +005283 +005285 +005288 +005290 +005292 +005293 +005297 +005298 +005303 +005304 +005305 +005306 +005307 +005310 +005311 +005312 +005314 +005315 +005318 +005319 +005320 +005325 +005326 +005327 +005328 +005331 +005336 +005337 +005338 +005340 +005343 +005344 +005345 +005346 +005348 +005349 +005350 +005351 +005352 +005355 +005358 +005360 +005363 +005365 +005367 +005368 +005369 +005370 +005371 +005373 +005374 +005378 +005379 +005380 +005383 +005384 +005385 +005387 +005388 +005389 +005391 +005393 +005395 +005396 +005397 +005398 +005404 +005405 +005406 +005407 +005408 +005410 +005413 +005414 +005416 +005417 +005418 +005419 +005420 +005421 +005423 +005424 +005429 +005430 +005431 +005433 +005434 +005436 +005438 +005439 +005440 +005441 +005445 +005448 +005450 +005451 +005453 +005454 +005455 +005457 +005461 +005465 +005467 +005469 +005470 +005471 +005475 +005478 +005481 +005483 +005485 +005486 +005487 +005489 +005496 +005497 +005499 +005507 +005508 +005509 +005510 +005511 +005514 +005515 +005517 +005518 +005519 +005521 +005522 +005524 +005526 +005527 +005530 +005531 +005535 +005536 +005539 +005541 +005542 +005544 +005547 +005549 +005550 +005552 +005554 +005559 +005563 +005566 +005568 +005573 +005574 +005576 +005577 +005579 +005582 +005583 +005584 +005585 +005586 +005588 +005590 +005591 +005592 +005593 +005599 +005600 +005601 +005603 +005605 +005606 +005608 +005609 +005611 +005613 +005614 +005615 +005618 +005620 +005624 +005625 +005629 +005630 +005631 +005636 +005637 +005639 +005640 +005641 +005644 +005645 +005647 +005648 +005652 +005653 +005654 +005655 +005657 +005658 +005660 +005662 +005664 +005668 +005669 +005672 +005674 +005676 +005679 +005680 +005682 +005685 +005686 +005687 +005693 +005695 +005696 +005697 +005699 +005700 +005701 +005702 +005704 +005705 +005710 +005713 +005714 +005715 +005716 +005718 +005719 +005723 +005728 +005729 +005730 +005731 +005732 +005735 +005736 +005738 +005740 +005741 +005742 +005743 +005747 +005749 +005752 +005755 +005756 +005757 +005760 +005761 +005762 +005764 +005765 +005768 +005769 +005773 +005779 +005780 +005781 +005782 +005783 +005784 +005786 +005788 +005789 +005790 +005791 +005794 +005796 +005799 +005803 +005805 +005806 +005811 +005812 +005813 +005814 +005815 +005817 +005818 +005819 +005821 +005824 +005825 +005826 +005828 +005829 +005830 +005831 +005836 +005838 +005839 +005840 +005841 +005843 +005845 +005850 +005851 +005852 +005853 +005854 +005856 +005859 +005860 +005861 +005863 +005864 +005867 +005868 +005873 +005874 +005875 +005877 +005878 +005879 +005881 +005884 +005885 +005888 +005889 +005893 +005894 +005895 +005897 +005899 +005901 +005903 +005905 +005906 +005908 +005909 +005910 +005911 +005912 +005914 +005917 +005918 +005919 +005920 +005923 +005928 +005930 +005938 +005940 +005947 +005948 +005951 +005952 +005954 +005956 +005960 +005961 +005963 +005964 +005968 +005970 +005971 +005975 +005979 +005980 +005981 +005983 +005984 +005985 +005988 +005989 +005990 +005991 +005992 +005995 
+005996 +005998 +006000 +006001 +006004 +006005 +006009 +006011 +006012 +006018 +006020 +006023 +006025 +006026 +006027 +006028 +006029 +006030 +006033 +006035 +006038 +006041 +006042 +006043 +006045 +006046 +006055 +006058 +006061 +006062 +006065 +006066 +006067 +006069 +006070 +006071 +006073 +006074 +006078 +006079 +006084 +006088 +006089 +006091 +006095 +006096 +006097 +006098 +006100 +006103 +006104 +006105 +006107 +006108 +006111 +006117 +006120 +006123 +006124 +006125 +006128 +006129 +006130 +006131 +006133 +006134 +006135 +006136 +006139 +006140 +006141 +006146 +006148 +006150 +006151 +006153 +006156 +006158 +006159 +006161 +006162 +006163 +006166 +006170 +006171 +006172 +006174 +006175 +006176 +006177 +006179 +006180 +006181 +006183 +006184 +006185 +006187 +006188 +006189 +006190 +006196 +006198 +006201 +006202 +006203 +006206 +006208 +006209 +006210 +006212 +006214 +006215 +006216 +006218 +006219 +006220 +006221 +006222 +006223 +006224 +006225 +006229 +006230 +006233 +006234 +006235 +006236 +006238 +006240 +006241 +006243 +006247 +006249 +006250 +006251 +006252 +006254 +006258 +006259 +006260 +006261 +006262 +006264 +006267 +006269 +006270 +006272 +006275 +006276 +006277 +006279 +006281 +006282 +006284 +006285 +006286 +006289 +006290 +006291 +006295 +006296 +006299 +006300 +006301 +006304 +006305 +006306 +006309 +006314 +006318 +006319 +006320 +006321 +006323 +006325 +006329 +006330 +006335 +006337 +006338 +006339 +006341 +006344 +006346 +006348 +006349 +006350 +006351 +006352 +006353 +006355 +006357 +006362 +006363 +006366 +006367 +006369 +006371 +006374 +006375 +006377 +006381 +006382 +006385 +006387 +006391 +006392 +006395 +006396 +006398 +006400 +006404 +006409 +006411 +006417 +006418 +006419 +006421 +006424 +006425 +006427 +006428 +006429 +006430 +006433 +006434 +006436 +006437 +006438 +006440 +006442 +006443 +006444 +006445 +006447 +006448 +006449 +006450 +006455 +006456 +006458 +006459 +006462 +006463 +006465 +006466 +006468 +006470 +006472 +006473 +006474 +006475 +006476 +006480 +006482 +006483 +006484 +006486 +006488 +006492 +006495 +006497 +006499 +006501 +006503 +006506 +006507 +006509 +006512 +006515 +006519 +006520 +006523 +006524 +006529 +006530 +006532 +006534 +006536 +006538 +006542 +006543 +006547 +006548 +006549 +006550 +006551 +006553 +006556 +006560 +006562 +006564 +006565 +006569 +006570 +006572 +006575 +006576 +006578 +006583 +006584 +006585 +006587 +006588 +006593 +006595 +006597 +006599 +006602 +006603 +006605 +006606 +006609 +006610 +006611 +006612 +006617 +006618 +006619 +006621 +006622 +006625 +006626 +006627 +006628 +006631 +006632 +006635 +006636 +006637 +006638 +006643 +006645 +006647 +006648 +006652 +006654 +006657 +006658 +006660 +006661 +006664 +006666 +006667 +006668 +006670 +006671 +006673 +006674 +006677 +006678 +006679 +006681 +006682 +006684 +006687 +006689 +006690 +006694 +006695 +006696 +006697 +006698 +006699 +006702 +006703 +006704 +006706 +006707 +006708 +006709 +006714 +006718 +006719 +006722 +006725 +006726 +006727 +006730 +006731 +006734 +006735 +006736 +006738 +006739 +006740 +006747 +006748 +006751 +006753 +006755 +006759 +006760 +006761 +006762 +006765 +006766 +006768 +006769 +006772 +006773 +006777 +006781 +006782 +006783 +006784 +006786 +006789 +006794 +006797 +006799 +006800 +006802 +006803 +006805 +006806 +006808 +006810 +006813 +006814 +006819 +006821 +006822 +006824 +006825 +006827 +006828 +006829 +006833 +006835 +006836 +006838 +006839 +006840 +006841 +006842 +006844 +006845 +006847 +006848 +006849 +006850 +006852 +006855 
+006858 +006859 +006860 +006862 +006864 +006865 +006866 +006867 +006868 +006869 +006874 +006876 +006878 +006880 +006883 +006884 +006886 +006887 +006892 +006893 +006896 +006899 +006900 +006903 +006908 +006909 +006910 +006911 +006912 +006914 +006916 +006917 +006918 +006919 +006922 +006924 +006930 +006931 +006932 +006933 +006934 +006935 +006939 +006940 +006943 +006944 +006945 +006947 +006948 +006949 +006950 +006952 +006953 +006956 +006958 +006959 +006962 +006963 +006965 +006966 +006968 +006971 +006972 +006976 +006981 +006983 +006987 +006988 +006989 +006990 +006994 +006995 +007002 +007003 +007004 +007006 +007007 +007008 +007009 +007011 +007016 +007018 +007020 +007021 +007022 +007023 +007025 +007029 +007031 +007033 +007035 +007036 +007038 +007039 +007040 +007042 +007045 +007046 +007048 +007049 +007050 +007052 +007054 +007056 +007058 +007059 +007062 +007064 +007065 +007068 +007070 +007071 +007072 +007073 +007074 +007075 +007077 +007078 +007079 +007080 +007084 +007086 +007088 +007089 +007090 +007092 +007093 +007095 +007097 +007100 +007101 +007104 +007105 +007108 +007109 +007113 +007114 +007117 +007121 +007122 +007123 +007125 +007128 +007129 +007130 +007132 +007133 +007138 +007139 +007140 +007141 +007144 +007146 +007147 +007148 +007149 +007150 +007152 +007153 +007154 +007159 +007162 +007163 +007165 +007166 +007167 +007168 +007172 +007174 +007177 +007180 +007182 +007184 +007185 +007187 +007189 +007191 +007193 +007194 +007197 +007200 +007204 +007205 +007208 +007210 +007211 +007212 +007213 +007214 +007215 +007216 +007217 +007219 +007222 +007223 +007224 +007227 +007230 +007234 +007236 +007241 +007243 +007244 +007245 +007247 +007249 +007250 +007256 +007258 +007259 +007260 +007261 +007263 +007266 +007270 +007271 +007274 +007275 +007276 +007279 +007280 +007283 +007284 +007285 +007289 +007292 +007294 +007295 +007296 +007297 +007298 +007299 +007300 +007302 +007305 +007308 +007311 +007314 +007318 +007322 +007323 +007325 +007327 +007329 +007330 +007334 +007336 +007343 +007344 +007346 +007350 +007351 +007356 +007359 +007361 +007363 +007365 +007369 +007370 +007372 +007373 +007374 +007375 +007376 +007381 +007383 +007385 +007388 +007389 +007390 +007394 +007396 +007398 +007408 +007410 +007411 +007413 +007414 +007416 +007417 +007419 +007421 +007422 +007424 +007425 +007427 +007431 +007432 +007433 +007435 +007436 +007437 +007438 +007439 +007443 +007445 +007446 +007448 +007449 +007451 +007454 +007457 +007458 +007460 +007461 +007465 +007466 +007467 +007468 +007470 +007474 +007475 +007477 +007479 +007480 +007481 +007482 +007483 +007484 +007486 +007489 +007490 +007491 +007493 +007497 +007498 +007503 +007506 +007511 +007513 +007517 +007519 +007521 +007523 +007524 +007525 +007526 +007527 +007528 +007530 +007533 +007535 +007536 +007537 +007538 +007540 +007543 +007544 +007546 +007547 +007551 +007555 +007558 +007559 +007563 +007565 +007566 +007568 +007570 +007571 +007572 +007575 +007576 +007578 +007579 +007585 +007586 +007590 +007592 +007594 +007600 +007601 +007603 +007605 +007606 +007611 +007612 +007614 +007615 +007618 +007619 +007621 +007622 +007624 +007626 +007629 +007631 +007633 +007637 +007639 +007640 +007642 +007647 +007649 +007650 +007653 +007654 +007655 +007656 +007657 +007662 +007663 +007664 +007666 +007667 +007668 +007670 +007671 +007672 +007673 +007675 +007677 +007678 +007679 +007680 +007682 +007683 +007685 +007687 +007688 +007691 +007692 +007694 +007696 +007697 +007699 +007702 +007704 +007705 +007709 +007712 +007713 +007715 +007718 +007720 +007721 +007723 +007724 +007727 +007729 +007731 +007732 +007735 +007736 
+007740 +007742 +007743 +007745 +007746 +007748 +007749 +007751 +007753 +007754 +007758 +007760 +007762 +007763 +007765 +007767 +007768 +007772 +007773 +007775 +007776 +007777 +007779 +007781 +007786 +007790 +007791 +007793 +007795 +007798 +007799 +007803 +007809 +007810 +007812 +007813 +007814 +007815 +007819 +007820 +007821 +007824 +007826 +007831 +007833 +007834 +007836 +007838 +007840 +007841 +007843 +007845 +007847 +007853 +007854 +007855 +007856 +007857 +007859 +007863 +007864 +007865 +007868 +007869 +007872 +007873 +007876 +007877 +007878 +007883 +007884 +007885 +007886 +007889 +007890 +007897 +007898 +007899 +007900 +007901 +007902 +007905 +007908 +007909 +007910 +007911 +007914 +007915 +007916 +007919 +007920 +007921 +007923 +007924 +007925 +007926 +007928 +007931 +007932 +007933 +007935 +007939 +007940 +007943 +007946 +007947 +007950 +007953 +007954 +007956 +007958 +007959 +007963 +007964 +007968 +007970 +007971 +007974 +007976 +007979 +007980 +007984 +007987 +007991 +007996 +007997 +007998 +007999 +008001 +008002 +008004 +008005 +008008 +008009 +008012 +008017 +008019 +008023 +008024 +008026 +008029 +008031 +008032 +008033 +008036 +008037 +008040 +008042 +008043 +008044 +008048 +008049 +008051 +008053 +008057 +008060 +008061 +008062 +008063 +008064 +008067 +008068 +008069 +008072 +008075 +008076 +008079 +008082 +008083 +008084 +008085 +008086 +008087 +008091 +008093 +008095 +008096 +008098 +008100 +008101 +008103 +008105 +008106 +008107 +008108 +008112 +008115 +008116 +008117 +008121 +008122 +008125 +008127 +008130 +008132 +008137 +008138 +008139 +008140 +008141 +008142 +008144 +008150 +008151 +008159 +008160 +008163 +008164 +008166 +008168 +008169 +008171 +008173 +008174 +008175 +008177 +008180 +008186 +008188 +008189 +008190 +008191 +008197 +008199 +008200 +008202 +008203 +008204 +008208 +008209 +008211 +008213 +008216 +008218 +008220 +008222 +008223 +008224 +008225 +008226 +008229 +008232 +008235 +008236 +008241 +008244 +008248 +008250 +008251 +008252 +008253 +008254 +008258 +008260 +008261 +008262 +008263 +008268 +008269 +008272 +008275 +008279 +008280 +008281 +008282 +008284 +008285 +008292 +008293 +008294 +008295 +008296 +008297 +008299 +008300 +008301 +008302 +008306 +008307 +008310 +008311 +008312 +008313 +008315 +008316 +008317 +008318 +008319 +008320 +008322 +008323 +008326 +008327 +008329 +008332 +008335 +008336 +008338 +008341 +008342 +008345 +008346 +008349 +008351 +008355 +008359 +008360 +008364 +008365 +008368 +008370 +008372 +008374 +008376 +008381 +008384 +008385 +008386 +008387 +008388 +008390 +008391 +008397 +008398 +008403 +008409 +008410 +008413 +008415 +008416 +008422 +008423 +008424 +008425 +008426 +008427 +008429 +008430 +008433 +008434 +008437 +008438 +008442 +008443 +008444 +008445 +008449 +008450 +008452 +008453 +008454 +008456 +008461 +008462 +008465 +008466 +008467 +008468 +008470 +008472 +008475 +008477 +008478 +008482 +008483 +008484 +008485 +008492 +008494 +008495 +008498 +008499 +008502 +008503 +008506 +008509 +008512 +008513 +008514 +008517 +008518 +008519 +008521 +008522 +008523 +008524 +008526 +008529 +008530 +008533 +008534 +008535 +008536 +008541 +008542 +008549 +008550 +008553 +008556 +008557 +008558 +008559 +008562 +008564 +008568 +008572 +008573 +008576 +008581 +008582 +008584 +008585 +008586 +008587 +008588 +008592 +008595 +008596 +008601 +008602 +008604 +008606 +008607 +008608 +008610 +008612 +008615 +008617 +008618 +008620 +008621 +008624 +008628 +008633 +008635 +008636 +008638 +008639 +008644 +008645 +008647 +008653 +008654 +008655 
+008663 +008665 +008667 +008670 +008676 +008680 +008683 +008687 +008688 +008690 +008691 +008692 +008695 +008698 +008699 +008701 +008702 +008706 +008709 +008710 +008713 +008716 +008717 +008718 +008720 +008722 +008723 +008725 +008727 +008728 +008730 +008731 +008732 +008733 +008738 +008739 +008741 +008742 +008744 +008747 +008748 +008749 +008750 +008752 +008753 +008755 +008756 +008757 +008759 +008760 +008764 +008766 +008768 +008769 +008770 +008771 +008772 +008773 +008775 +008776 +008783 +008784 +008790 +008793 +008794 +008796 +008799 +008801 +008805 +008806 +008809 +008810 +008811 +008813 +008814 +008815 +008817 +008819 +008822 +008823 +008826 +008831 +008833 +008835 +008836 +008837 +008838 +008840 +008841 +008843 +008847 +008848 +008849 +008854 +008856 +008858 +008859 +008862 +008865 +008867 +008871 +008872 +008873 +008874 +008876 +008878 +008879 +008880 +008883 +008884 +008885 +008886 +008888 +008890 +008891 +008892 +008900 +008905 +008909 +008911 +008913 +008914 +008917 +008919 +008920 +008921 +008923 +008926 +008927 +008929 +008930 +008931 +008932 +008933 +008936 +008939 +008940 +008942 +008943 +008944 +008948 +008951 +008953 +008955 +008958 +008960 +008961 +008962 +008965 +008966 +008967 +008968 +008969 +008970 +008971 +008973 +008975 +008976 +008978 +008979 +008980 +008982 +008983 +008985 +008987 +008988 +008989 +008995 +008997 +008999 +009000 +009002 +009004 +009005 +009006 +009007 +009015 +009016 +009018 +009019 +009020 +009022 +009024 +009027 +009029 +009032 +009034 +009035 +009036 +009037 +009039 +009042 +009045 +009048 +009049 +009051 +009053 +009058 +009059 +009060 +009063 +009064 +009066 +009068 +009072 +009073 +009078 +009079 +009080 +009085 +009086 +009087 +009089 +009091 +009094 +009098 +009099 +009100 +009105 +009106 +009108 +009112 +009113 +009114 +009116 +009117 +009121 +009123 +009126 +009128 +009129 +009131 +009133 +009136 +009138 +009141 +009144 +009147 +009148 +009150 +009151 +009153 +009155 +009157 +009159 +009160 +009161 +009162 +009163 +009166 +009168 +009173 +009174 +009175 +009177 +009178 +009179 +009180 +009181 +009184 +009185 +009186 +009187 +009189 +009191 +009192 +009193 +009194 +009195 +009196 +009197 +009200 +009202 +009205 +009208 +009209 +009212 +009213 +009214 +009215 +009218 +009221 +009224 +009227 +009230 +009236 +009238 +009239 +009242 +009244 +009245 +009246 +009247 +009249 +009250 +009251 +009252 +009254 +009255 +009259 +009268 +009269 +009270 +009271 +009272 +009273 +009278 +009279 +009281 +009282 +009283 +009285 +009286 +009287 +009288 +009289 +009290 +009291 +009295 +009296 +009299 +009303 +009306 +009307 +009308 +009309 +009312 +009315 +009316 +009318 +009323 +009324 +009325 +009326 +009327 +009330 +009331 +009333 +009334 +009336 +009337 +009339 +009342 +009343 +009347 +009348 +009349 +009350 +009351 +009354 +009358 +009359 +009362 +009365 +009368 +009371 +009373 +009374 +009375 +009377 +009378 +009382 +009386 +009388 +009389 +009392 +009393 +009394 +009398 +009401 +009405 +009406 +009407 +009408 +009409 +009410 +009411 +009412 +009413 +009414 +009417 +009418 +009419 +009420 +009421 +009422 +009424 +009429 +009432 +009433 +009434 +009437 +009438 +009439 +009440 +009443 +009445 +009446 +009448 +009454 +009455 +009456 +009457 +009458 +009459 +009460 +009461 +009463 +009464 +009465 +009466 +009468 +009469 +009470 +009472 +009476 +009477 +009479 +009480 +009481 +009484 +009488 +009490 +009491 +009494 +009496 +009497 +009499 +009500 +009502 +009504 +009507 +009508 +009512 +009515 +009516 +009517 +009518 +009519 +009520 +009523 +009524 +009526 +009527 
+009528 +009531 +009532 +009533 +009537 +009540 +009541 +009542 +009543 +009545 +009546 +009549 +009550 +009551 +009557 +009558 +009560 +009562 +009565 +009566 +009567 +009568 +009571 +009573 +009576 +009577 +009579 +009580 +009584 +009585 +009586 +009587 +009588 +009591 +009596 +009597 +009598 +009600 +009603 +009605 +009609 +009611 +009613 +009614 +009615 +009617 +009618 +009619 +009620 +009621 +009623 +009627 +009629 +009634 +009636 +009637 +009638 +009641 +009644 +009647 +009649 +009650 +009654 +009655 +009656 +009658 +009659 +009664 +009666 +009667 +009668 +009670 +009671 +009676 +009678 +009679 +009681 +009684 +009685 +009686 +009687 +009691 +009692 +009693 +009695 +009698 +009699 +009700 +009702 +009703 +009706 +009707 +009709 +009710 +009711 +009712 +009713 +009717 +009718 +009719 +009721 +009724 +009726 +009729 +009732 +009733 +009734 +009735 +009737 +009738 +009743 +009745 +009746 +009747 +009748 +009749 +009754 +009755 +009756 +009758 +009761 +009762 +009763 +009764 +009767 +009772 +009773 +009774 +009776 +009778 +009780 +009781 +009785 +009789 +009790 +009792 +009794 +009796 +009797 +009800 +009801 +009805 +009807 +009808 +009809 +009810 +009813 +009816 +009819 +009822 +009823 +009825 +009828 +009830 +009831 +009832 +009833 +009834 +009836 +009839 +009841 +009842 +009845 +009848 +009851 +009852 +009855 +009858 +009859 +009860 +009862 +009863 +009865 +009867 +009868 +009869 +009870 +009872 +009874 +009877 +009878 +009879 +009880 +009881 +009882 +009884 +009886 +009887 +009894 +009896 +009897 +009898 +009900 +009902 +009904 +009905 +009908 +009911 +009913 +009917 +009918 +009920 +009923 +009926 +009932 +009935 +009938 +009939 +009940 +009942 +009944 +009945 +009946 +009947 +009949 +009950 +009954 +009955 +009958 +009959 +009961 +2008_000002 +2008_000003 +2008_000007 +2008_000008 +2008_000009 +2008_000015 +2008_000016 +2008_000019 +2008_000021 +2008_000023 +2008_000026 +2008_000027 +2008_000028 +2008_000032 +2008_000033 +2008_000034 +2008_000036 +2008_000037 +2008_000041 +2008_000042 +2008_000043 +2008_000045 +2008_000050 +2008_000051 +2008_000052 +2008_000053 +2008_000054 +2008_000056 +2008_000059 +2008_000060 +2008_000062 +2008_000064 +2008_000066 +2008_000067 +2008_000070 +2008_000073 +2008_000074 +2008_000075 +2008_000076 +2008_000078 +2008_000080 +2008_000082 +2008_000084 +2008_000085 +2008_000089 +2008_000090 +2008_000093 +2008_000095 +2008_000096 +2008_000097 +2008_000099 +2008_000103 +2008_000105 +2008_000107 +2008_000109 +2008_000112 +2008_000115 +2008_000116 +2008_000119 +2008_000120 +2008_000123 +2008_000128 +2008_000131 +2008_000132 +2008_000133 +2008_000134 +2008_000138 +2008_000140 +2008_000141 +2008_000142 +2008_000143 +2008_000144 +2008_000145 +2008_000148 +2008_000149 +2008_000151 +2008_000154 +2008_000162 +2008_000163 +2008_000174 +2008_000176 +2008_000177 +2008_000181 +2008_000182 +2008_000183 +2008_000185 +2008_000187 +2008_000188 +2008_000189 +2008_000190 +2008_000191 +2008_000192 +2008_000193 +2008_000194 +2008_000195 +2008_000196 +2008_000197 +2008_000199 +2008_000202 +2008_000203 +2008_000204 +2008_000207 +2008_000213 +2008_000215 +2008_000217 +2008_000219 +2008_000222 +2008_000223 +2008_000226 +2008_000227 +2008_000233 +2008_000234 +2008_000235 +2008_000236 +2008_000237 +2008_000238 +2008_000239 +2008_000243 +2008_000244 +2008_000246 +2008_000251 +2008_000252 +2008_000253 +2008_000254 +2008_000255 +2008_000257 +2008_000259 +2008_000260 +2008_000261 +2008_000262 +2008_000264 +2008_000266 +2008_000268 +2008_000270 +2008_000271 +2008_000272 +2008_000273 
+2008_000274 +2008_000275 +2008_000277 +2008_000278 +2008_000281 +2008_000283 +2008_000284 +2008_000287 +2008_000289 +2008_000290 +2008_000291 +2008_000297 +2008_000298 +2008_000304 +2008_000305 +2008_000306 +2008_000307 +2008_000309 +2008_000311 +2008_000313 +2008_000315 +2008_000316 +2008_000318 +2008_000321 +2008_000328 +2008_000330 +2008_000335 +2008_000336 +2008_000338 +2008_000339 +2008_000340 +2008_000342 +2008_000343 +2008_000345 +2008_000346 +2008_000348 +2008_000350 +2008_000354 +2008_000356 +2008_000358 +2008_000359 +2008_000361 +2008_000364 +2008_000365 +2008_000367 +2008_000371 +2008_000373 +2008_000376 +2008_000378 +2008_000380 +2008_000381 +2008_000382 +2008_000383 +2008_000391 +2008_000392 +2008_000393 +2008_000397 +2008_000398 +2008_000399 +2008_000400 +2008_000401 +2008_000403 +2008_000405 +2008_000406 +2008_000407 +2008_000408 +2008_000413 +2008_000414 +2008_000415 +2008_000416 +2008_000418 +2008_000419 +2008_000421 +2008_000422 +2008_000423 +2008_000424 +2008_000426 +2008_000428 +2008_000432 +2008_000435 +2008_000436 +2008_000437 +2008_000442 +2008_000443 +2008_000445 +2008_000446 +2008_000447 +2008_000448 +2008_000452 +2008_000455 +2008_000457 +2008_000461 +2008_000464 +2008_000465 +2008_000466 +2008_000469 +2008_000470 +2008_000471 +2008_000472 +2008_000473 +2008_000474 +2008_000475 +2008_000480 +2008_000481 +2008_000488 +2008_000489 +2008_000491 +2008_000492 +2008_000493 +2008_000495 +2008_000496 +2008_000498 +2008_000499 +2008_000501 +2008_000502 +2008_000505 +2008_000510 +2008_000511 +2008_000512 +2008_000514 +2008_000515 +2008_000516 +2008_000519 +2008_000522 +2008_000527 +2008_000531 +2008_000532 +2008_000533 +2008_000535 +2008_000536 +2008_000540 +2008_000541 +2008_000544 +2008_000545 +2008_000547 +2008_000548 +2008_000552 +2008_000553 +2008_000558 +2008_000559 +2008_000561 +2008_000562 +2008_000563 +2008_000564 +2008_000566 +2008_000567 +2008_000568 +2008_000569 +2008_000572 +2008_000573 +2008_000578 +2008_000579 +2008_000581 +2008_000583 +2008_000584 +2008_000585 +2008_000588 +2008_000589 +2008_000595 +2008_000599 +2008_000602 +2008_000605 +2008_000607 +2008_000609 +2008_000613 +2008_000614 +2008_000615 +2008_000619 +2008_000620 +2008_000622 +2008_000623 +2008_000626 +2008_000628 +2008_000629 +2008_000630 +2008_000634 +2008_000636 +2008_000640 +2008_000641 +2008_000645 +2008_000646 +2008_000647 +2008_000648 +2008_000650 +2008_000652 +2008_000655 +2008_000656 +2008_000657 +2008_000659 +2008_000660 +2008_000661 +2008_000662 +2008_000666 +2008_000669 +2008_000670 +2008_000672 +2008_000673 +2008_000674 +2008_000676 +2008_000677 +2008_000678 +2008_000683 +2008_000689 +2008_000690 +2008_000691 +2008_000694 +2008_000695 +2008_000696 +2008_000697 +2008_000699 +2008_000700 +2008_000703 +2008_000704 +2008_000705 +2008_000706 +2008_000711 +2008_000714 +2008_000716 +2008_000719 +2008_000721 +2008_000723 +2008_000724 +2008_000725 +2008_000726 +2008_000727 +2008_000729 +2008_000731 +2008_000732 +2008_000733 +2008_000734 +2008_000737 +2008_000740 +2008_000742 +2008_000745 +2008_000748 +2008_000753 +2008_000756 +2008_000758 +2008_000760 +2008_000761 +2008_000764 +2008_000765 +2008_000769 +2008_000775 +2008_000776 +2008_000777 +2008_000778 +2008_000780 +2008_000782 +2008_000783 +2008_000785 +2008_000787 +2008_000788 +2008_000790 +2008_000792 +2008_000793 +2008_000795 +2008_000796 +2008_000798 +2008_000801 +2008_000803 +2008_000804 +2008_000805 +2008_000806 +2008_000808 +2008_000811 +2008_000814 +2008_000815 +2008_000817 +2008_000824 +2008_000825 +2008_000828 +2008_000829 
+2008_000832 +2008_000833 +2008_000834 +2008_000835 +2008_000837 +2008_000839 +2008_000841 +2008_000842 +2008_000844 +2008_000847 +2008_000848 +2008_000851 +2008_000853 +2008_000854 +2008_000857 +2008_000858 +2008_000860 +2008_000861 +2008_000863 +2008_000864 +2008_000867 +2008_000868 +2008_000870 +2008_000873 +2008_000875 +2008_000876 +2008_000878 +2008_000880 +2008_000881 +2008_000883 +2008_000884 +2008_000885 +2008_000887 +2008_000897 +2008_000899 +2008_000901 +2008_000902 +2008_000904 +2008_000905 +2008_000908 +2008_000910 +2008_000911 +2008_000912 +2008_000914 +2008_000915 +2008_000916 +2008_000917 +2008_000919 +2008_000922 +2008_000923 +2008_000924 +2008_000928 +2008_000931 +2008_000934 +2008_000936 +2008_000939 +2008_000940 +2008_000941 +2008_000942 +2008_000943 +2008_000944 +2008_000950 +2008_000952 +2008_000953 +2008_000956 +2008_000957 +2008_000959 +2008_000960 +2008_000964 +2008_000965 +2008_000970 +2008_000971 +2008_000972 +2008_000973 +2008_000976 +2008_000979 +2008_000981 +2008_000982 +2008_000984 +2008_000985 +2008_000987 +2008_000992 +2008_000993 +2008_000999 +2008_001004 +2008_001007 +2008_001009 +2008_001012 +2008_001013 +2008_001018 +2008_001020 +2008_001021 +2008_001022 +2008_001023 +2008_001024 +2008_001026 +2008_001028 +2008_001030 +2008_001031 +2008_001034 +2008_001035 +2008_001036 +2008_001039 +2008_001040 +2008_001041 +2008_001042 +2008_001046 +2008_001047 +2008_001048 +2008_001052 +2008_001054 +2008_001055 +2008_001056 +2008_001057 +2008_001060 +2008_001062 +2008_001063 +2008_001066 +2008_001068 +2008_001070 +2008_001071 +2008_001073 +2008_001074 +2008_001075 +2008_001076 +2008_001077 +2008_001078 +2008_001080 +2008_001081 +2008_001083 +2008_001089 +2008_001090 +2008_001092 +2008_001098 +2008_001099 +2008_001104 +2008_001105 +2008_001106 +2008_001111 +2008_001112 +2008_001113 +2008_001114 +2008_001115 +2008_001118 +2008_001119 +2008_001120 +2008_001121 +2008_001122 +2008_001130 +2008_001133 +2008_001134 +2008_001135 +2008_001136 +2008_001137 +2008_001139 +2008_001140 +2008_001142 +2008_001143 +2008_001147 +2008_001150 +2008_001154 +2008_001155 +2008_001158 +2008_001159 +2008_001160 +2008_001161 +2008_001164 +2008_001166 +2008_001167 +2008_001168 +2008_001169 +2008_001170 +2008_001171 +2008_001177 +2008_001182 +2008_001183 +2008_001185 +2008_001188 +2008_001189 +2008_001190 +2008_001192 +2008_001194 +2008_001196 +2008_001199 +2008_001202 +2008_001203 +2008_001205 +2008_001206 +2008_001208 +2008_001210 +2008_001215 +2008_001218 +2008_001219 +2008_001220 +2008_001221 +2008_001223 +2008_001225 +2008_001226 +2008_001227 +2008_001230 +2008_001231 +2008_001235 +2008_001236 +2008_001238 +2008_001241 +2008_001245 +2008_001248 +2008_001249 +2008_001255 +2008_001257 +2008_001260 +2008_001262 +2008_001263 +2008_001264 +2008_001267 +2008_001271 +2008_001272 +2008_001274 +2008_001275 +2008_001278 +2008_001283 +2008_001284 +2008_001285 +2008_001290 +2008_001294 +2008_001296 +2008_001299 +2008_001301 +2008_001302 +2008_001304 +2008_001306 +2008_001307 +2008_001308 +2008_001310 +2008_001312 +2008_001314 +2008_001318 +2008_001320 +2008_001322 +2008_001325 +2008_001329 +2008_001333 +2008_001334 +2008_001335 +2008_001336 +2008_001338 +2008_001340 +2008_001344 +2008_001346 +2008_001349 +2008_001350 +2008_001351 +2008_001353 +2008_001356 +2008_001357 +2008_001358 +2008_001359 +2008_001366 +2008_001367 +2008_001369 +2008_001373 +2008_001374 +2008_001375 +2008_001376 +2008_001379 +2008_001380 +2008_001382 +2008_001383 +2008_001385 +2008_001387 +2008_001388 +2008_001389 +2008_001390 
+2008_001391 +2008_001395 +2008_001399 +2008_001401 +2008_001402 +2008_001404 +2008_001405 +2008_001406 +2008_001408 +2008_001410 +2008_001413 +2008_001414 +2008_001415 +2008_001419 +2008_001420 +2008_001427 +2008_001428 +2008_001429 +2008_001430 +2008_001431 +2008_001432 +2008_001433 +2008_001434 +2008_001436 +2008_001437 +2008_001439 +2008_001440 +2008_001444 +2008_001445 +2008_001446 +2008_001448 +2008_001451 +2008_001454 +2008_001455 +2008_001456 +2008_001460 +2008_001461 +2008_001462 +2008_001464 +2008_001466 +2008_001467 +2008_001468 +2008_001470 +2008_001475 +2008_001478 +2008_001479 +2008_001481 +2008_001482 +2008_001486 +2008_001488 +2008_001491 +2008_001493 +2008_001494 +2008_001495 +2008_001498 +2008_001500 +2008_001501 +2008_001503 +2008_001504 +2008_001510 +2008_001513 +2008_001514 +2008_001516 +2008_001520 +2008_001522 +2008_001523 +2008_001525 +2008_001527 +2008_001529 +2008_001531 +2008_001533 +2008_001534 +2008_001536 +2008_001538 +2008_001539 +2008_001540 +2008_001541 +2008_001542 +2008_001543 +2008_001544 +2008_001546 +2008_001547 +2008_001549 +2008_001550 +2008_001551 +2008_001553 +2008_001563 +2008_001564 +2008_001566 +2008_001574 +2008_001575 +2008_001576 +2008_001577 +2008_001580 +2008_001582 +2008_001586 +2008_001589 +2008_001590 +2008_001591 +2008_001592 +2008_001593 +2008_001594 +2008_001596 +2008_001598 +2008_001601 +2008_001602 +2008_001605 +2008_001607 +2008_001609 +2008_001610 +2008_001613 +2008_001615 +2008_001617 +2008_001619 +2008_001620 +2008_001622 +2008_001624 +2008_001625 +2008_001626 +2008_001629 +2008_001631 +2008_001632 +2008_001636 +2008_001638 +2008_001640 +2008_001641 +2008_001643 +2008_001645 +2008_001648 +2008_001649 +2008_001652 +2008_001653 +2008_001655 +2008_001659 +2008_001660 +2008_001661 +2008_001663 +2008_001666 +2008_001667 +2008_001668 +2008_001669 +2008_001670 +2008_001673 +2008_001676 +2008_001679 +2008_001680 +2008_001681 +2008_001682 +2008_001688 +2008_001690 +2008_001691 +2008_001692 +2008_001694 +2008_001697 +2008_001699 +2008_001702 +2008_001704 +2008_001706 +2008_001708 +2008_001709 +2008_001710 +2008_001712 +2008_001714 +2008_001715 +2008_001716 +2008_001717 +2008_001719 +2008_001722 +2008_001723 +2008_001724 +2008_001727 +2008_001729 +2008_001730 +2008_001731 +2008_001735 +2008_001736 +2008_001737 +2008_001741 +2008_001742 +2008_001744 +2008_001745 +2008_001746 +2008_001750 +2008_001751 +2008_001757 +2008_001758 +2008_001761 +2008_001763 +2008_001764 +2008_001765 +2008_001769 +2008_001770 +2008_001772 +2008_001773 +2008_001774 +2008_001775 +2008_001781 +2008_001782 +2008_001783 +2008_001784 +2008_001787 +2008_001789 +2008_001791 +2008_001792 +2008_001796 +2008_001797 +2008_001799 +2008_001801 +2008_001802 +2008_001805 +2008_001806 +2008_001808 +2008_001809 +2008_001810 +2008_001811 +2008_001812 +2008_001813 +2008_001814 +2008_001815 +2008_001816 +2008_001820 +2008_001821 +2008_001823 +2008_001825 +2008_001829 +2008_001830 +2008_001832 +2008_001834 +2008_001836 +2008_001837 +2008_001838 +2008_001841 +2008_001842 +2008_001843 +2008_001845 +2008_001849 +2008_001850 +2008_001852 +2008_001854 +2008_001856 +2008_001858 +2008_001860 +2008_001862 +2008_001863 +2008_001865 +2008_001866 +2008_001867 +2008_001869 +2008_001871 +2008_001872 +2008_001874 +2008_001876 +2008_001880 +2008_001881 +2008_001882 +2008_001885 +2008_001888 +2008_001894 +2008_001895 +2008_001896 +2008_001899 +2008_001903 +2008_001905 +2008_001907 +2008_001908 +2008_001909 +2008_001910 +2008_001911 +2008_001914 +2008_001919 +2008_001920 +2008_001921 +2008_001926 
+2008_001928 +2008_001929 +2008_001930 +2008_001932 +2008_001934 +2008_001937 +2008_001941 +2008_001945 +2008_001946 +2008_001947 +2008_001951 +2008_001955 +2008_001956 +2008_001957 +2008_001958 +2008_001961 +2008_001965 +2008_001966 +2008_001967 +2008_001969 +2008_001970 +2008_001971 +2008_001977 +2008_001978 +2008_001979 +2008_001980 +2008_001982 +2008_001985 +2008_001986 +2008_001987 +2008_001989 +2008_001992 +2008_001997 +2008_001998 +2008_002000 +2008_002001 +2008_002002 +2008_002003 +2008_002004 +2008_002005 +2008_002007 +2008_002009 +2008_002011 +2008_002013 +2008_002017 +2008_002021 +2008_002023 +2008_002026 +2008_002031 +2008_002032 +2008_002033 +2008_002035 +2008_002036 +2008_002037 +2008_002039 +2008_002042 +2008_002043 +2008_002045 +2008_002046 +2008_002047 +2008_002052 +2008_002056 +2008_002058 +2008_002061 +2008_002062 +2008_002064 +2008_002066 +2008_002067 +2008_002069 +2008_002071 +2008_002073 +2008_002079 +2008_002080 +2008_002082 +2008_002084 +2008_002086 +2008_002088 +2008_002092 +2008_002093 +2008_002094 +2008_002096 +2008_002098 +2008_002099 +2008_002103 +2008_002107 +2008_002112 +2008_002113 +2008_002114 +2008_002115 +2008_002116 +2008_002117 +2008_002118 +2008_002119 +2008_002123 +2008_002124 +2008_002129 +2008_002131 +2008_002132 +2008_002138 +2008_002140 +2008_002144 +2008_002145 +2008_002146 +2008_002148 +2008_002150 +2008_002151 +2008_002152 +2008_002153 +2008_002155 +2008_002156 +2008_002158 +2008_002160 +2008_002162 +2008_002167 +2008_002169 +2008_002172 +2008_002175 +2008_002176 +2008_002177 +2008_002179 +2008_002181 +2008_002182 +2008_002185 +2008_002191 +2008_002193 +2008_002194 +2008_002195 +2008_002197 +2008_002198 +2008_002199 +2008_002200 +2008_002201 +2008_002202 +2008_002204 +2008_002205 +2008_002206 +2008_002207 +2008_002208 +2008_002209 +2008_002210 +2008_002212 +2008_002215 +2008_002218 +2008_002220 +2008_002221 +2008_002222 +2008_002223 +2008_002225 +2008_002227 +2008_002229 +2008_002231 +2008_002234 +2008_002236 +2008_002239 +2008_002240 +2008_002241 +2008_002243 +2008_002244 +2008_002247 +2008_002248 +2008_002250 +2008_002251 +2008_002255 +2008_002258 +2008_002259 +2008_002262 +2008_002267 +2008_002269 +2008_002270 +2008_002272 +2008_002273 +2008_002278 +2008_002279 +2008_002280 +2008_002281 +2008_002283 +2008_002288 +2008_002292 +2008_002293 +2008_002294 +2008_002296 +2008_002298 +2008_002299 +2008_002304 +2008_002305 +2008_002307 +2008_002311 +2008_002312 +2008_002314 +2008_002317 +2008_002321 +2008_002322 +2008_002324 +2008_002325 +2008_002327 +2008_002328 +2008_002329 +2008_002330 +2008_002331 +2008_002335 +2008_002338 +2008_002340 +2008_002343 +2008_002344 +2008_002347 +2008_002349 +2008_002350 +2008_002356 +2008_002357 +2008_002358 +2008_002359 +2008_002361 +2008_002362 +2008_002365 +2008_002366 +2008_002368 +2008_002369 +2008_002370 +2008_002372 +2008_002374 +2008_002377 +2008_002378 +2008_002379 +2008_002383 +2008_002384 +2008_002389 +2008_002395 +2008_002399 +2008_002401 +2008_002403 +2008_002404 +2008_002405 +2008_002408 +2008_002410 +2008_002411 +2008_002412 +2008_002414 +2008_002418 +2008_002419 +2008_002422 +2008_002424 +2008_002425 +2008_002428 +2008_002429 +2008_002430 +2008_002434 +2008_002436 +2008_002437 +2008_002438 +2008_002439 +2008_002441 +2008_002442 +2008_002444 +2008_002445 +2008_002446 +2008_002448 +2008_002451 +2008_002452 +2008_002454 +2008_002456 +2008_002457 +2008_002458 +2008_002459 +2008_002461 +2008_002464 +2008_002465 +2008_002466 +2008_002467 +2008_002470 +2008_002471 +2008_002473 +2008_002477 +2008_002481 
+2008_002482 +2008_002483 +2008_002484 +2008_002485 +2008_002487 +2008_002491 +2008_002492 +2008_002494 +2008_002495 +2008_002499 +2008_002501 +2008_002502 +2008_002504 +2008_002506 +2008_002508 +2008_002509 +2008_002510 +2008_002512 +2008_002514 +2008_002515 +2008_002516 +2008_002521 +2008_002523 +2008_002524 +2008_002526 +2008_002527 +2008_002533 +2008_002536 +2008_002540 +2008_002541 +2008_002542 +2008_002543 +2008_002547 +2008_002549 +2008_002551 +2008_002555 +2008_002558 +2008_002562 +2008_002564 +2008_002566 +2008_002567 +2008_002568 +2008_002574 +2008_002575 +2008_002576 +2008_002578 +2008_002579 +2008_002583 +2008_002584 +2008_002588 +2008_002589 +2008_002590 +2008_002597 +2008_002598 +2008_002599 +2008_002601 +2008_002603 +2008_002606 +2008_002610 +2008_002612 +2008_002613 +2008_002616 +2008_002621 +2008_002622 +2008_002623 +2008_002624 +2008_002625 +2008_002631 +2008_002634 +2008_002638 +2008_002639 +2008_002640 +2008_002641 +2008_002643 +2008_002645 +2008_002647 +2008_002648 +2008_002649 +2008_002650 +2008_002652 +2008_002653 +2008_002662 +2008_002665 +2008_002666 +2008_002668 +2008_002670 +2008_002672 +2008_002673 +2008_002674 +2008_002675 +2008_002676 +2008_002677 +2008_002678 +2008_002679 +2008_002680 +2008_002681 +2008_002682 +2008_002684 +2008_002686 +2008_002687 +2008_002696 +2008_002697 +2008_002698 +2008_002700 +2008_002701 +2008_002704 +2008_002705 +2008_002709 +2008_002710 +2008_002712 +2008_002714 +2008_002715 +2008_002716 +2008_002718 +2008_002719 +2008_002720 +2008_002725 +2008_002728 +2008_002730 +2008_002732 +2008_002733 +2008_002735 +2008_002736 +2008_002738 +2008_002741 +2008_002746 +2008_002749 +2008_002750 +2008_002751 +2008_002752 +2008_002753 +2008_002756 +2008_002758 +2008_002760 +2008_002762 +2008_002766 +2008_002767 +2008_002768 +2008_002772 +2008_002773 +2008_002774 +2008_002775 +2008_002776 +2008_002778 +2008_002783 +2008_002784 +2008_002787 +2008_002789 +2008_002791 +2008_002792 +2008_002793 +2008_002794 +2008_002795 +2008_002801 +2008_002804 +2008_002806 +2008_002808 +2008_002809 +2008_002811 +2008_002813 +2008_002814 +2008_002817 +2008_002820 +2008_002823 +2008_002826 +2008_002829 +2008_002830 +2008_002831 +2008_002834 +2008_002835 +2008_002838 +2008_002842 +2008_002843 +2008_002845 +2008_002847 +2008_002848 +2008_002850 +2008_002852 +2008_002854 +2008_002856 +2008_002857 +2008_002859 +2008_002860 +2008_002864 +2008_002866 +2008_002868 +2008_002869 +2008_002870 +2008_002872 +2008_002873 +2008_002875 +2008_002876 +2008_002879 +2008_002880 +2008_002882 +2008_002883 +2008_002885 +2008_002887 +2008_002890 +2008_002891 +2008_002892 +2008_002894 +2008_002897 +2008_002899 +2008_002900 +2008_002903 +2008_002904 +2008_002906 +2008_002908 +2008_002909 +2008_002910 +2008_002913 +2008_002916 +2008_002917 +2008_002920 +2008_002922 +2008_002926 +2008_002929 +2008_002930 +2008_002931 +2008_002932 +2008_002936 +2008_002942 +2008_002943 +2008_002946 +2008_002947 +2008_002948 +2008_002951 +2008_002954 +2008_002955 +2008_002956 +2008_002957 +2008_002958 +2008_002960 +2008_002961 +2008_002965 +2008_002966 +2008_002968 +2008_002970 +2008_002971 +2008_002972 +2008_002973 +2008_002977 +2008_002983 +2008_002984 +2008_002985 +2008_002988 +2008_002992 +2008_002993 +2008_002997 +2008_002999 +2008_003001 +2008_003003 +2008_003005 +2008_003008 +2008_003013 +2008_003015 +2008_003017 +2008_003018 +2008_003020 +2008_003021 +2008_003022 +2008_003023 +2008_003025 +2008_003026 +2008_003030 +2008_003033 +2008_003034 +2008_003037 +2008_003039 +2008_003041 +2008_003043 +2008_003045 
+2008_003048 +2008_003049 +2008_003051 +2008_003052 +2008_003053 +2008_003055 +2008_003056 +2008_003057 +2008_003059 +2008_003060 +2008_003061 +2008_003062 +2008_003063 +2008_003065 +2008_003067 +2008_003068 +2008_003072 +2008_003073 +2008_003075 +2008_003076 +2008_003079 +2008_003081 +2008_003082 +2008_003083 +2008_003087 +2008_003088 +2008_003089 +2008_003090 +2008_003093 +2008_003094 +2008_003095 +2008_003099 +2008_003100 +2008_003101 +2008_003104 +2008_003105 +2008_003106 +2008_003107 +2008_003108 +2008_003110 +2008_003112 +2008_003114 +2008_003120 +2008_003122 +2008_003127 +2008_003128 +2008_003132 +2008_003133 +2008_003134 +2008_003135 +2008_003136 +2008_003140 +2008_003141 +2008_003143 +2008_003144 +2008_003146 +2008_003147 +2008_003151 +2008_003152 +2008_003154 +2008_003155 +2008_003157 +2008_003160 +2008_003161 +2008_003167 +2008_003168 +2008_003170 +2008_003178 +2008_003180 +2008_003181 +2008_003182 +2008_003186 +2008_003187 +2008_003189 +2008_003191 +2008_003193 +2008_003196 +2008_003200 +2008_003202 +2008_003203 +2008_003205 +2008_003208 +2008_003209 +2008_003210 +2008_003211 +2008_003213 +2008_003220 +2008_003222 +2008_003224 +2008_003225 +2008_003228 +2008_003231 +2008_003232 +2008_003238 +2008_003239 +2008_003242 +2008_003244 +2008_003245 +2008_003248 +2008_003249 +2008_003251 +2008_003252 +2008_003255 +2008_003256 +2008_003261 +2008_003263 +2008_003264 +2008_003265 +2008_003266 +2008_003269 +2008_003270 +2008_003271 +2008_003272 +2008_003275 +2008_003276 +2008_003277 +2008_003278 +2008_003280 +2008_003283 +2008_003286 +2008_003287 +2008_003288 +2008_003289 +2008_003290 +2008_003291 +2008_003295 +2008_003297 +2008_003300 +2008_003302 +2008_003303 +2008_003304 +2008_003305 +2008_003311 +2008_003313 +2008_003316 +2008_003318 +2008_003320 +2008_003321 +2008_003323 +2008_003326 +2008_003329 +2008_003330 +2008_003331 +2008_003333 +2008_003334 +2008_003335 +2008_003336 +2008_003338 +2008_003342 +2008_003343 +2008_003344 +2008_003347 +2008_003348 +2008_003350 +2008_003351 +2008_003359 +2008_003360 +2008_003361 +2008_003362 +2008_003369 +2008_003373 +2008_003374 +2008_003378 +2008_003379 +2008_003380 +2008_003381 +2008_003382 +2008_003384 +2008_003386 +2008_003393 +2008_003394 +2008_003395 +2008_003402 +2008_003405 +2008_003406 +2008_003407 +2008_003409 +2008_003414 +2008_003415 +2008_003417 +2008_003418 +2008_003420 +2008_003423 +2008_003424 +2008_003426 +2008_003429 +2008_003430 +2008_003432 +2008_003433 +2008_003434 +2008_003435 +2008_003437 +2008_003439 +2008_003442 +2008_003443 +2008_003447 +2008_003448 +2008_003449 +2008_003451 +2008_003452 +2008_003453 +2008_003458 +2008_003461 +2008_003462 +2008_003463 +2008_003464 +2008_003466 +2008_003467 +2008_003469 +2008_003472 +2008_003475 +2008_003476 +2008_003477 +2008_003478 +2008_003479 +2008_003480 +2008_003482 +2008_003483 +2008_003484 +2008_003485 +2008_003488 +2008_003489 +2008_003492 +2008_003493 +2008_003496 +2008_003497 +2008_003498 +2008_003499 +2008_003500 +2008_003501 +2008_003504 +2008_003507 +2008_003510 +2008_003511 +2008_003514 +2008_003515 +2008_003519 +2008_003520 +2008_003521 +2008_003522 +2008_003523 +2008_003524 +2008_003526 +2008_003531 +2008_003533 +2008_003534 +2008_003542 +2008_003544 +2008_003545 +2008_003546 +2008_003547 +2008_003552 +2008_003557 +2008_003559 +2008_003560 +2008_003562 +2008_003565 +2008_003571 +2008_003572 +2008_003575 +2008_003576 +2008_003577 +2008_003578 +2008_003579 +2008_003580 +2008_003582 +2008_003585 +2008_003587 +2008_003589 +2008_003590 +2008_003591 +2008_003592 +2008_003593 
+2008_003596 +2008_003598 +2008_003604 +2008_003607 +2008_003608 +2008_003609 +2008_003610 +2008_003611 +2008_003613 +2008_003617 +2008_003618 +2008_003619 +2008_003621 +2008_003622 +2008_003624 +2008_003626 +2008_003629 +2008_003635 +2008_003636 +2008_003637 +2008_003638 +2008_003645 +2008_003647 +2008_003650 +2008_003652 +2008_003653 +2008_003655 +2008_003658 +2008_003659 +2008_003662 +2008_003665 +2008_003667 +2008_003671 +2008_003672 +2008_003673 +2008_003674 +2008_003675 +2008_003676 +2008_003677 +2008_003680 +2008_003681 +2008_003682 +2008_003683 +2008_003684 +2008_003685 +2008_003688 +2008_003689 +2008_003691 +2008_003694 +2008_003697 +2008_003701 +2008_003703 +2008_003704 +2008_003706 +2008_003707 +2008_003709 +2008_003712 +2008_003713 +2008_003718 +2008_003719 +2008_003720 +2008_003721 +2008_003722 +2008_003726 +2008_003729 +2008_003732 +2008_003733 +2008_003737 +2008_003743 +2008_003744 +2008_003745 +2008_003746 +2008_003748 +2008_003749 +2008_003753 +2008_003754 +2008_003755 +2008_003756 +2008_003761 +2008_003762 +2008_003763 +2008_003764 +2008_003766 +2008_003767 +2008_003768 +2008_003769 +2008_003772 +2008_003773 +2008_003774 +2008_003775 +2008_003776 +2008_003777 +2008_003779 +2008_003780 +2008_003781 +2008_003782 +2008_003788 +2008_003789 +2008_003791 +2008_003793 +2008_003794 +2008_003796 +2008_003799 +2008_003800 +2008_003801 +2008_003802 +2008_003805 +2008_003811 +2008_003812 +2008_003813 +2008_003814 +2008_003815 +2008_003819 +2008_003820 +2008_003821 +2008_003825 +2008_003826 +2008_003827 +2008_003829 +2008_003830 +2008_003831 +2008_003835 +2008_003838 +2008_003840 +2008_003841 +2008_003842 +2008_003843 +2008_003844 +2008_003846 +2008_003847 +2008_003849 +2008_003852 +2008_003854 +2008_003856 +2008_003858 +2008_003860 +2008_003864 +2008_003866 +2008_003868 +2008_003870 +2008_003871 +2008_003873 +2008_003874 +2008_003876 +2008_003881 +2008_003882 +2008_003883 +2008_003884 +2008_003885 +2008_003886 +2008_003888 +2008_003891 +2008_003892 +2008_003894 +2008_003904 +2008_003905 +2008_003908 +2008_003913 +2008_003914 +2008_003915 +2008_003916 +2008_003920 +2008_003921 +2008_003922 +2008_003924 +2008_003925 +2008_003926 +2008_003929 +2008_003932 +2008_003933 +2008_003939 +2008_003940 +2008_003941 +2008_003942 +2008_003943 +2008_003944 +2008_003945 +2008_003947 +2008_003948 +2008_003951 +2008_003956 +2008_003958 +2008_003962 +2008_003965 +2008_003966 +2008_003967 +2008_003969 +2008_003970 +2008_003971 +2008_003974 +2008_003975 +2008_003976 +2008_003978 +2008_003983 +2008_003984 +2008_003985 +2008_003986 +2008_003988 +2008_003989 +2008_003992 +2008_003995 +2008_003996 +2008_003997 +2008_003998 +2008_004000 +2008_004002 +2008_004003 +2008_004004 +2008_004006 +2008_004007 +2008_004008 +2008_004014 +2008_004015 +2008_004016 +2008_004017 +2008_004018 +2008_004020 +2008_004021 +2008_004022 +2008_004024 +2008_004026 +2008_004027 +2008_004030 +2008_004036 +2008_004037 +2008_004040 +2008_004042 +2008_004044 +2008_004045 +2008_004046 +2008_004048 +2008_004053 +2008_004054 +2008_004055 +2008_004056 +2008_004058 +2008_004064 +2008_004066 +2008_004069 +2008_004071 +2008_004074 +2008_004075 +2008_004076 +2008_004077 +2008_004080 +2008_004081 +2008_004084 +2008_004087 +2008_004088 +2008_004090 +2008_004092 +2008_004093 +2008_004097 +2008_004100 +2008_004101 +2008_004102 +2008_004103 +2008_004105 +2008_004106 +2008_004110 +2008_004112 +2008_004113 +2008_004119 +2008_004120 +2008_004121 +2008_004122 +2008_004123 +2008_004124 +2008_004125 +2008_004126 +2008_004127 +2008_004130 +2008_004134 
+2008_004135 +2008_004137 +2008_004138 +2008_004140 +2008_004142 +2008_004145 +2008_004147 +2008_004148 +2008_004155 +2008_004161 +2008_004163 +2008_004165 +2008_004166 +2008_004171 +2008_004174 +2008_004175 +2008_004176 +2008_004178 +2008_004182 +2008_004188 +2008_004189 +2008_004190 +2008_004195 +2008_004196 +2008_004198 +2008_004201 +2008_004203 +2008_004205 +2008_004208 +2008_004212 +2008_004213 +2008_004214 +2008_004216 +2008_004217 +2008_004218 +2008_004221 +2008_004224 +2008_004230 +2008_004231 +2008_004232 +2008_004234 +2008_004235 +2008_004239 +2008_004242 +2008_004243 +2008_004245 +2008_004246 +2008_004247 +2008_004251 +2008_004257 +2008_004258 +2008_004259 +2008_004263 +2008_004265 +2008_004269 +2008_004270 +2008_004271 +2008_004273 +2008_004274 +2008_004276 +2008_004278 +2008_004279 +2008_004280 +2008_004284 +2008_004287 +2008_004288 +2008_004289 +2008_004290 +2008_004291 +2008_004292 +2008_004293 +2008_004296 +2008_004297 +2008_004301 +2008_004303 +2008_004306 +2008_004307 +2008_004308 +2008_004312 +2008_004313 +2008_004314 +2008_004317 +2008_004318 +2008_004319 +2008_004321 +2008_004324 +2008_004325 +2008_004326 +2008_004327 +2008_004328 +2008_004330 +2008_004331 +2008_004333 +2008_004339 +2008_004342 +2008_004344 +2008_004345 +2008_004347 +2008_004348 +2008_004353 +2008_004354 +2008_004357 +2008_004358 +2008_004361 +2008_004362 +2008_004363 +2008_004365 +2008_004367 +2008_004371 +2008_004372 +2008_004374 +2008_004376 +2008_004378 +2008_004380 +2008_004384 +2008_004385 +2008_004387 +2008_004389 +2008_004391 +2008_004394 +2008_004396 +2008_004398 +2008_004399 +2008_004402 +2008_004403 +2008_004406 +2008_004408 +2008_004410 +2008_004411 +2008_004412 +2008_004414 +2008_004416 +2008_004417 +2008_004418 +2008_004419 +2008_004422 +2008_004425 +2008_004426 +2008_004427 +2008_004428 +2008_004430 +2008_004431 +2008_004433 +2008_004435 +2008_004436 +2008_004438 +2008_004439 +2008_004441 +2008_004443 +2008_004445 +2008_004450 +2008_004452 +2008_004453 +2008_004455 +2008_004457 +2008_004458 +2008_004459 +2008_004460 +2008_004462 +2008_004464 +2008_004469 +2008_004470 +2008_004471 +2008_004476 +2008_004477 +2008_004478 +2008_004479 +2008_004480 +2008_004482 +2008_004487 +2008_004488 +2008_004490 +2008_004492 +2008_004493 +2008_004497 +2008_004498 +2008_004499 +2008_004501 +2008_004502 +2008_004504 +2008_004505 +2008_004506 +2008_004510 +2008_004512 +2008_004513 +2008_004515 +2008_004518 +2008_004519 +2008_004520 +2008_004522 +2008_004525 +2008_004526 +2008_004528 +2008_004532 +2008_004533 +2008_004534 +2008_004538 +2008_004539 +2008_004540 +2008_004541 +2008_004544 +2008_004545 +2008_004546 +2008_004547 +2008_004549 +2008_004550 +2008_004551 +2008_004552 +2008_004553 +2008_004554 +2008_004559 +2008_004564 +2008_004567 +2008_004568 +2008_004570 +2008_004574 +2008_004575 +2008_004579 +2008_004581 +2008_004583 +2008_004584 +2008_004585 +2008_004588 +2008_004589 +2008_004590 +2008_004592 +2008_004593 +2008_004599 +2008_004602 +2008_004603 +2008_004605 +2008_004606 +2008_004607 +2008_004610 +2008_004611 +2008_004612 +2008_004613 +2008_004614 +2008_004615 +2008_004616 +2008_004617 +2008_004619 +2008_004620 +2008_004621 +2008_004624 +2008_004629 +2008_004630 +2008_004631 +2008_004632 +2008_004633 +2008_004634 +2008_004635 +2008_004636 +2008_004640 +2008_004646 +2008_004647 +2008_004648 +2008_004649 +2008_004653 +2008_004654 +2008_004656 +2008_004659 +2008_004661 +2008_004662 +2008_004663 +2008_004665 +2008_004666 +2008_004667 +2008_004668 +2008_004670 +2008_004671 +2008_004672 +2008_004677 
+2008_004678 +2008_004679 +2008_004684 +2008_004687 +2008_004688 +2008_004689 +2008_004690 +2008_004692 +2008_004695 +2008_004696 +2008_004697 +2008_004701 +2008_004702 +2008_004703 +2008_004704 +2008_004705 +2008_004706 +2008_004707 +2008_004711 +2008_004713 +2008_004716 +2008_004718 +2008_004719 +2008_004720 +2008_004722 +2008_004725 +2008_004726 +2008_004729 +2008_004730 +2008_004732 +2008_004736 +2008_004739 +2008_004740 +2008_004742 +2008_004745 +2008_004749 +2008_004750 +2008_004752 +2008_004754 +2008_004756 +2008_004758 +2008_004760 +2008_004763 +2008_004764 +2008_004766 +2008_004767 +2008_004768 +2008_004770 +2008_004771 +2008_004774 +2008_004776 +2008_004777 +2008_004778 +2008_004781 +2008_004783 +2008_004784 +2008_004786 +2008_004794 +2008_004795 +2008_004797 +2008_004802 +2008_004804 +2008_004805 +2008_004807 +2008_004808 +2008_004812 +2008_004814 +2008_004819 +2008_004821 +2008_004822 +2008_004825 +2008_004827 +2008_004832 +2008_004833 +2008_004834 +2008_004837 +2008_004838 +2008_004841 +2008_004844 +2008_004845 +2008_004847 +2008_004849 +2008_004850 +2008_004851 +2008_004852 +2008_004854 +2008_004856 +2008_004858 +2008_004862 +2008_004866 +2008_004868 +2008_004869 +2008_004872 +2008_004873 +2008_004874 +2008_004875 +2008_004876 +2008_004881 +2008_004885 +2008_004887 +2008_004892 +2008_004893 +2008_004894 +2008_004896 +2008_004898 +2008_004899 +2008_004900 +2008_004903 +2008_004904 +2008_004907 +2008_004908 +2008_004910 +2008_004911 +2008_004914 +2008_004917 +2008_004920 +2008_004921 +2008_004923 +2008_004926 +2008_004930 +2008_004931 +2008_004933 +2008_004934 +2008_004935 +2008_004937 +2008_004938 +2008_004940 +2008_004942 +2008_004945 +2008_004946 +2008_004948 +2008_004950 +2008_004955 +2008_004961 +2008_004964 +2008_004966 +2008_004967 +2008_004968 +2008_004969 +2008_004970 +2008_004973 +2008_004974 +2008_004975 +2008_004976 +2008_004977 +2008_004979 +2008_004981 +2008_004982 +2008_004983 +2008_004984 +2008_004985 +2008_004986 +2008_004990 +2008_004991 +2008_004995 +2008_004998 +2008_005000 +2008_005001 +2008_005003 +2008_005006 +2008_005008 +2008_005010 +2008_005013 +2008_005015 +2008_005016 +2008_005023 +2008_005032 +2008_005033 +2008_005035 +2008_005036 +2008_005037 +2008_005040 +2008_005042 +2008_005043 +2008_005045 +2008_005046 +2008_005049 +2008_005051 +2008_005054 +2008_005055 +2008_005057 +2008_005061 +2008_005063 +2008_005064 +2008_005065 +2008_005066 +2008_005068 +2008_005070 +2008_005071 +2008_005072 +2008_005074 +2008_005078 +2008_005080 +2008_005081 +2008_005082 +2008_005084 +2008_005085 +2008_005088 +2008_005089 +2008_005090 +2008_005092 +2008_005094 +2008_005096 +2008_005097 +2008_005098 +2008_005101 +2008_005105 +2008_005107 +2008_005108 +2008_005109 +2008_005110 +2008_005111 +2008_005114 +2008_005115 +2008_005117 +2008_005123 +2008_005127 +2008_005132 +2008_005133 +2008_005134 +2008_005136 +2008_005137 +2008_005139 +2008_005140 +2008_005146 +2008_005147 +2008_005150 +2008_005151 +2008_005156 +2008_005158 +2008_005159 +2008_005160 +2008_005166 +2008_005167 +2008_005168 +2008_005171 +2008_005172 +2008_005174 +2008_005175 +2008_005178 +2008_005181 +2008_005182 +2008_005183 +2008_005185 +2008_005186 +2008_005190 +2008_005191 +2008_005193 +2008_005194 +2008_005196 +2008_005197 +2008_005201 +2008_005204 +2008_005205 +2008_005208 +2008_005209 +2008_005213 +2008_005214 +2008_005215 +2008_005216 +2008_005217 +2008_005218 +2008_005220 +2008_005221 +2008_005231 +2008_005233 +2008_005234 +2008_005235 +2008_005236 +2008_005240 +2008_005242 +2008_005243 +2008_005244 
+2008_005245 +2008_005247 +2008_005248 +2008_005250 +2008_005251 +2008_005252 +2008_005253 +2008_005254 +2008_005255 +2008_005257 +2008_005260 +2008_005261 +2008_005266 +2008_005269 +2008_005270 +2008_005271 +2008_005272 +2008_005276 +2008_005277 +2008_005279 +2008_005281 +2008_005282 +2008_005283 +2008_005288 +2008_005294 +2008_005295 +2008_005296 +2008_005297 +2008_005300 +2008_005303 +2008_005304 +2008_005309 +2008_005310 +2008_005313 +2008_005315 +2008_005316 +2008_005319 +2008_005321 +2008_005323 +2008_005324 +2008_005325 +2008_005327 +2008_005329 +2008_005331 +2008_005333 +2008_005335 +2008_005336 +2008_005337 +2008_005338 +2008_005342 +2008_005345 +2008_005346 +2008_005347 +2008_005348 +2008_005349 +2008_005350 +2008_005354 +2008_005356 +2008_005357 +2008_005359 +2008_005360 +2008_005361 +2008_005362 +2008_005363 +2008_005365 +2008_005367 +2008_005369 +2008_005373 +2008_005374 +2008_005375 +2008_005376 +2008_005378 +2008_005379 +2008_005380 +2008_005382 +2008_005386 +2008_005389 +2008_005393 +2008_005395 +2008_005396 +2008_005398 +2008_005399 +2008_005400 +2008_005404 +2008_005405 +2008_005406 +2008_005408 +2008_005412 +2008_005414 +2008_005415 +2008_005417 +2008_005421 +2008_005422 +2008_005423 +2008_005427 +2008_005429 +2008_005431 +2008_005433 +2008_005436 +2008_005439 +2008_005443 +2008_005444 +2008_005445 +2008_005446 +2008_005447 +2008_005449 +2008_005451 +2008_005455 +2008_005456 +2008_005460 +2008_005463 +2008_005465 +2008_005467 +2008_005469 +2008_005472 +2008_005473 +2008_005477 +2008_005480 +2008_005484 +2008_005485 +2008_005490 +2008_005491 +2008_005494 +2008_005496 +2008_005498 +2008_005500 +2008_005501 +2008_005502 +2008_005504 +2008_005505 +2008_005507 +2008_005510 +2008_005511 +2008_005512 +2008_005514 +2008_005517 +2008_005519 +2008_005521 +2008_005522 +2008_005523 +2008_005525 +2008_005526 +2008_005527 +2008_005530 +2008_005531 +2008_005534 +2008_005536 +2008_005538 +2008_005541 +2008_005544 +2008_005548 +2008_005549 +2008_005550 +2008_005552 +2008_005553 +2008_005558 +2008_005560 +2008_005561 +2008_005563 +2008_005564 +2008_005566 +2008_005567 +2008_005569 +2008_005570 +2008_005572 +2008_005573 +2008_005574 +2008_005582 +2008_005584 +2008_005588 +2008_005589 +2008_005591 +2008_005593 +2008_005599 +2008_005600 +2008_005601 +2008_005603 +2008_005608 +2008_005609 +2008_005610 +2008_005611 +2008_005612 +2008_005614 +2008_005616 +2008_005618 +2008_005623 +2008_005625 +2008_005626 +2008_005627 +2008_005628 +2008_005631 +2008_005633 +2008_005634 +2008_005635 +2008_005636 +2008_005637 +2008_005638 +2008_005639 +2008_005641 +2008_005642 +2008_005643 +2008_005646 +2008_005649 +2008_005650 +2008_005652 +2008_005653 +2008_005656 +2008_005657 +2008_005660 +2008_005663 +2008_005664 +2008_005668 +2008_005673 +2008_005675 +2008_005676 +2008_005677 +2008_005678 +2008_005679 +2008_005680 +2008_005681 +2008_005682 +2008_005683 +2008_005685 +2008_005686 +2008_005687 +2008_005691 +2008_005695 +2008_005698 +2008_005699 +2008_005701 +2008_005702 +2008_005703 +2008_005705 +2008_005706 +2008_005707 +2008_005713 +2008_005714 +2008_005716 +2008_005719 +2008_005720 +2008_005721 +2008_005724 +2008_005726 +2008_005727 +2008_005728 +2008_005732 +2008_005734 +2008_005735 +2008_005736 +2008_005737 +2008_005738 +2008_005739 +2008_005742 +2008_005747 +2008_005748 +2008_005750 +2008_005752 +2008_005757 +2008_005758 +2008_005761 +2008_005763 +2008_005764 +2008_005767 +2008_005768 +2008_005770 +2008_005774 +2008_005777 +2008_005779 +2008_005780 +2008_005788 +2008_005790 +2008_005791 +2008_005792 
+2008_005794 +2008_005796 +2008_005798 +2008_005800 +2008_005801 +2008_005803 +2008_005805 +2008_005808 +2008_005810 +2008_005812 +2008_005816 +2008_005817 +2008_005818 +2008_005821 +2008_005822 +2008_005823 +2008_005825 +2008_005831 +2008_005832 +2008_005834 +2008_005838 +2008_005839 +2008_005843 +2008_005845 +2008_005846 +2008_005847 +2008_005848 +2008_005850 +2008_005853 +2008_005855 +2008_005856 +2008_005857 +2008_005860 +2008_005863 +2008_005865 +2008_005867 +2008_005869 +2008_005871 +2008_005873 +2008_005874 +2008_005875 +2008_005877 +2008_005878 +2008_005881 +2008_005882 +2008_005883 +2008_005884 +2008_005889 +2008_005890 +2008_005891 +2008_005893 +2008_005895 +2008_005897 +2008_005898 +2008_005902 +2008_005903 +2008_005904 +2008_005905 +2008_005907 +2008_005914 +2008_005915 +2008_005916 +2008_005918 +2008_005921 +2008_005923 +2008_005924 +2008_005926 +2008_005928 +2008_005929 +2008_005933 +2008_005934 +2008_005935 +2008_005936 +2008_005937 +2008_005938 +2008_005939 +2008_005943 +2008_005945 +2008_005954 +2008_005956 +2008_005957 +2008_005959 +2008_005960 +2008_005962 +2008_005964 +2008_005967 +2008_005968 +2008_005970 +2008_005972 +2008_005975 +2008_005976 +2008_005977 +2008_005978 +2008_005979 +2008_005980 +2008_005982 +2008_005984 +2008_005987 +2008_005989 +2008_005991 +2008_005997 +2008_006000 +2008_006002 +2008_006004 +2008_006007 +2008_006008 +2008_006010 +2008_006014 +2008_006017 +2008_006020 +2008_006021 +2008_006024 +2008_006027 +2008_006028 +2008_006031 +2008_006032 +2008_006034 +2008_006036 +2008_006037 +2008_006038 +2008_006039 +2008_006041 +2008_006042 +2008_006045 +2008_006046 +2008_006047 +2008_006049 +2008_006050 +2008_006052 +2008_006055 +2008_006058 +2008_006059 +2008_006062 +2008_006063 +2008_006064 +2008_006065 +2008_006067 +2008_006068 +2008_006070 +2008_006071 +2008_006072 +2008_006074 +2008_006076 +2008_006078 +2008_006081 +2008_006082 +2008_006085 +2008_006087 +2008_006088 +2008_006090 +2008_006092 +2008_006094 +2008_006096 +2008_006099 +2008_006100 +2008_006102 +2008_006104 +2008_006108 +2008_006109 +2008_006111 +2008_006112 +2008_006113 +2008_006117 +2008_006119 +2008_006120 +2008_006121 +2008_006124 +2008_006128 +2008_006129 +2008_006130 +2008_006133 +2008_006135 +2008_006136 +2008_006140 +2008_006143 +2008_006144 +2008_006145 +2008_006147 +2008_006148 +2008_006151 +2008_006152 +2008_006154 +2008_006158 +2008_006159 +2008_006163 +2008_006164 +2008_006166 +2008_006169 +2008_006170 +2008_006175 +2008_006178 +2008_006179 +2008_006181 +2008_006182 +2008_006185 +2008_006186 +2008_006188 +2008_006190 +2008_006192 +2008_006194 +2008_006195 +2008_006200 +2008_006203 +2008_006205 +2008_006207 +2008_006210 +2008_006211 +2008_006213 +2008_006215 +2008_006216 +2008_006218 +2008_006219 +2008_006220 +2008_006221 +2008_006222 +2008_006224 +2008_006225 +2008_006227 +2008_006229 +2008_006232 +2008_006233 +2008_006234 +2008_006235 +2008_006239 +2008_006240 +2008_006242 +2008_006244 +2008_006249 +2008_006250 +2008_006253 +2008_006254 +2008_006256 +2008_006257 +2008_006258 +2008_006262 +2008_006265 +2008_006267 +2008_006269 +2008_006271 +2008_006272 +2008_006273 +2008_006275 +2008_006276 +2008_006280 +2008_006281 +2008_006282 +2008_006285 +2008_006288 +2008_006289 +2008_006290 +2008_006294 +2008_006295 +2008_006298 +2008_006300 +2008_006303 +2008_006307 +2008_006310 +2008_006311 +2008_006315 +2008_006316 +2008_006317 +2008_006320 +2008_006323 +2008_006325 +2008_006327 +2008_006329 +2008_006330 +2008_006331 +2008_006335 +2008_006336 +2008_006337 +2008_006339 +2008_006341 
+2008_006345 +2008_006347 +2008_006349 +2008_006350 +2008_006351 +2008_006353 +2008_006355 +2008_006356 +2008_006359 +2008_006361 +2008_006362 +2008_006364 +2008_006365 +2008_006366 +2008_006368 +2008_006369 +2008_006370 +2008_006373 +2008_006376 +2008_006377 +2008_006382 +2008_006384 +2008_006386 +2008_006387 +2008_006389 +2008_006390 +2008_006392 +2008_006394 +2008_006397 +2008_006400 +2008_006401 +2008_006403 +2008_006404 +2008_006407 +2008_006408 +2008_006409 +2008_006410 +2008_006416 +2008_006417 +2008_006419 +2008_006421 +2008_006424 +2008_006425 +2008_006427 +2008_006429 +2008_006430 +2008_006432 +2008_006433 +2008_006434 +2008_006436 +2008_006438 +2008_006441 +2008_006447 +2008_006448 +2008_006449 +2008_006452 +2008_006458 +2008_006461 +2008_006462 +2008_006463 +2008_006467 +2008_006470 +2008_006474 +2008_006475 +2008_006477 +2008_006480 +2008_006481 +2008_006482 +2008_006483 +2008_006487 +2008_006488 +2008_006489 +2008_006490 +2008_006491 +2008_006496 +2008_006497 +2008_006500 +2008_006502 +2008_006503 +2008_006506 +2008_006509 +2008_006511 +2008_006512 +2008_006517 +2008_006519 +2008_006520 +2008_006522 +2008_006523 +2008_006524 +2008_006526 +2008_006528 +2008_006530 +2008_006534 +2008_006538 +2008_006540 +2008_006543 +2008_006546 +2008_006547 +2008_006548 +2008_006549 +2008_006553 +2008_006554 +2008_006558 +2008_006561 +2008_006562 +2008_006564 +2008_006566 +2008_006567 +2008_006568 +2008_006570 +2008_006576 +2008_006578 +2008_006579 +2008_006585 +2008_006586 +2008_006587 +2008_006588 +2008_006591 +2008_006598 +2008_006599 +2008_006600 +2008_006602 +2008_006604 +2008_006605 +2008_006606 +2008_006609 +2008_006610 +2008_006611 +2008_006613 +2008_006614 +2008_006616 +2008_006617 +2008_006619 +2008_006621 +2008_006623 +2008_006624 +2008_006625 +2008_006626 +2008_006629 +2008_006631 +2008_006634 +2008_006635 +2008_006637 +2008_006638 +2008_006641 +2008_006642 +2008_006645 +2008_006646 +2008_006649 +2008_006650 +2008_006654 +2008_006655 +2008_006656 +2008_006657 +2008_006660 +2008_006662 +2008_006663 +2008_006665 +2008_006667 +2008_006668 +2008_006671 +2008_006677 +2008_006682 +2008_006684 +2008_006686 +2008_006690 +2008_006691 +2008_006692 +2008_006694 +2008_006696 +2008_006700 +2008_006701 +2008_006703 +2008_006705 +2008_006708 +2008_006710 +2008_006712 +2008_006714 +2008_006715 +2008_006716 +2008_006717 +2008_006718 +2008_006719 +2008_006720 +2008_006722 +2008_006724 +2008_006728 +2008_006730 +2008_006731 +2008_006732 +2008_006733 +2008_006737 +2008_006743 +2008_006746 +2008_006747 +2008_006748 +2008_006750 +2008_006751 +2008_006752 +2008_006753 +2008_006758 +2008_006761 +2008_006762 +2008_006764 +2008_006765 +2008_006767 +2008_006773 +2008_006774 +2008_006776 +2008_006777 +2008_006778 +2008_006779 +2008_006781 +2008_006784 +2008_006785 +2008_006792 +2008_006793 +2008_006796 +2008_006797 +2008_006798 +2008_006800 +2008_006802 +2008_006807 +2008_006808 +2008_006810 +2008_006811 +2008_006813 +2008_006815 +2008_006816 +2008_006817 +2008_006818 +2008_006819 +2008_006820 +2008_006824 +2008_006825 +2008_006827 +2008_006828 +2008_006831 +2008_006832 +2008_006833 +2008_006834 +2008_006835 +2008_006837 +2008_006839 +2008_006841 +2008_006843 +2008_006844 +2008_006847 +2008_006849 +2008_006855 +2008_006857 +2008_006863 +2008_006864 +2008_006865 +2008_006868 +2008_006870 +2008_006872 +2008_006873 +2008_006874 +2008_006877 +2008_006879 +2008_006880 +2008_006881 +2008_006882 +2008_006885 +2008_006887 +2008_006889 +2008_006890 +2008_006892 +2008_006896 +2008_006898 +2008_006900 +2008_006902 
+2008_006903 +2008_006904 +2008_006907 +2008_006908 +2008_006909 +2008_006910 +2008_006912 +2008_006919 +2008_006920 +2008_006921 +2008_006923 +2008_006924 +2008_006925 +2008_006926 +2008_006933 +2008_006936 +2008_006939 +2008_006941 +2008_006944 +2008_006946 +2008_006948 +2008_006949 +2008_006950 +2008_006951 +2008_006952 +2008_006953 +2008_006954 +2008_006956 +2008_006959 +2008_006960 +2008_006961 +2008_006962 +2008_006965 +2008_006967 +2008_006968 +2008_006969 +2008_006973 +2008_006979 +2008_006980 +2008_006981 +2008_006986 +2008_006987 +2008_006989 +2008_006991 +2008_006992 +2008_006997 +2008_006998 +2008_006999 +2008_007003 +2008_007004 +2008_007006 +2008_007009 +2008_007010 +2008_007011 +2008_007012 +2008_007014 +2008_007019 +2008_007021 +2008_007022 +2008_007025 +2008_007026 +2008_007028 +2008_007030 +2008_007031 +2008_007032 +2008_007034 +2008_007038 +2008_007039 +2008_007042 +2008_007043 +2008_007045 +2008_007048 +2008_007050 +2008_007054 +2008_007056 +2008_007057 +2008_007058 +2008_007059 +2008_007060 +2008_007061 +2008_007064 +2008_007067 +2008_007069 +2008_007070 +2008_007073 +2008_007075 +2008_007076 +2008_007081 +2008_007082 +2008_007084 +2008_007085 +2008_007086 +2008_007090 +2008_007091 +2008_007095 +2008_007096 +2008_007097 +2008_007098 +2008_007101 +2008_007103 +2008_007105 +2008_007106 +2008_007108 +2008_007112 +2008_007114 +2008_007115 +2008_007118 +2008_007119 +2008_007120 +2008_007123 +2008_007124 +2008_007129 +2008_007130 +2008_007131 +2008_007133 +2008_007134 +2008_007138 +2008_007142 +2008_007143 +2008_007145 +2008_007146 +2008_007147 +2008_007151 +2008_007156 +2008_007161 +2008_007163 +2008_007164 +2008_007165 +2008_007166 +2008_007167 +2008_007168 +2008_007169 +2008_007171 +2008_007176 +2008_007179 +2008_007181 +2008_007182 +2008_007184 +2008_007185 +2008_007187 +2008_007188 +2008_007189 +2008_007190 +2008_007194 +2008_007195 +2008_007196 +2008_007197 +2008_007201 +2008_007205 +2008_007207 +2008_007208 +2008_007211 +2008_007214 +2008_007216 +2008_007217 +2008_007218 +2008_007219 +2008_007221 +2008_007222 +2008_007223 +2008_007225 +2008_007226 +2008_007227 +2008_007229 +2008_007231 +2008_007236 +2008_007237 +2008_007239 +2008_007241 +2008_007242 +2008_007245 +2008_007246 +2008_007247 +2008_007250 +2008_007252 +2008_007254 +2008_007256 +2008_007260 +2008_007261 +2008_007264 +2008_007265 +2008_007266 +2008_007269 +2008_007273 +2008_007274 +2008_007277 +2008_007279 +2008_007280 +2008_007281 +2008_007282 +2008_007285 +2008_007286 +2008_007287 +2008_007289 +2008_007291 +2008_007293 +2008_007295 +2008_007298 +2008_007305 +2008_007307 +2008_007311 +2008_007312 +2008_007313 +2008_007314 +2008_007317 +2008_007319 +2008_007320 +2008_007321 +2008_007323 +2008_007324 +2008_007325 +2008_007327 +2008_007332 +2008_007334 +2008_007335 +2008_007336 +2008_007339 +2008_007343 +2008_007344 +2008_007346 +2008_007348 +2008_007350 +2008_007352 +2008_007353 +2008_007356 +2008_007357 +2008_007358 +2008_007361 +2008_007363 +2008_007364 +2008_007374 +2008_007375 +2008_007378 +2008_007382 +2008_007383 +2008_007384 +2008_007388 +2008_007389 +2008_007390 +2008_007392 +2008_007393 +2008_007394 +2008_007397 +2008_007398 +2008_007402 +2008_007403 +2008_007404 +2008_007409 +2008_007410 +2008_007415 +2008_007417 +2008_007421 +2008_007423 +2008_007424 +2008_007425 +2008_007428 +2008_007430 +2008_007431 +2008_007432 +2008_007433 +2008_007434 +2008_007435 +2008_007438 +2008_007441 +2008_007442 +2008_007443 +2008_007444 +2008_007446 +2008_007448 +2008_007452 +2008_007455 +2008_007456 +2008_007458 
+2008_007459 +2008_007461 +2008_007465 +2008_007466 +2008_007469 +2008_007470 +2008_007471 +2008_007472 +2008_007473 +2008_007476 +2008_007477 +2008_007478 +2008_007480 +2008_007485 +2008_007486 +2008_007488 +2008_007491 +2008_007494 +2008_007496 +2008_007497 +2008_007498 +2008_007500 +2008_007501 +2008_007504 +2008_007507 +2008_007509 +2008_007510 +2008_007511 +2008_007513 +2008_007514 +2008_007515 +2008_007519 +2008_007521 +2008_007524 +2008_007525 +2008_007527 +2008_007528 +2008_007529 +2008_007531 +2008_007533 +2008_007534 +2008_007536 +2008_007537 +2008_007538 +2008_007544 +2008_007546 +2008_007548 +2008_007556 +2008_007558 +2008_007559 +2008_007561 +2008_007565 +2008_007567 +2008_007573 +2008_007574 +2008_007576 +2008_007579 +2008_007581 +2008_007583 +2008_007584 +2008_007585 +2008_007586 +2008_007587 +2008_007588 +2008_007589 +2008_007591 +2008_007593 +2008_007594 +2008_007595 +2008_007596 +2008_007597 +2008_007599 +2008_007604 +2008_007608 +2008_007610 +2008_007611 +2008_007612 +2008_007613 +2008_007617 +2008_007618 +2008_007621 +2008_007623 +2008_007625 +2008_007629 +2008_007630 +2008_007632 +2008_007635 +2008_007640 +2008_007641 +2008_007643 +2008_007646 +2008_007648 +2008_007649 +2008_007653 +2008_007656 +2008_007660 +2008_007661 +2008_007662 +2008_007664 +2008_007665 +2008_007666 +2008_007668 +2008_007669 +2008_007673 +2008_007675 +2008_007676 +2008_007677 +2008_007682 +2008_007683 +2008_007685 +2008_007688 +2008_007690 +2008_007691 +2008_007692 +2008_007693 +2008_007694 +2008_007696 +2008_007697 +2008_007698 +2008_007701 +2008_007702 +2008_007704 +2008_007706 +2008_007709 +2008_007710 +2008_007714 +2008_007716 +2008_007717 +2008_007719 +2008_007724 +2008_007726 +2008_007729 +2008_007730 +2008_007733 +2008_007735 +2008_007736 +2008_007737 +2008_007738 +2008_007739 +2008_007741 +2008_007742 +2008_007745 +2008_007746 +2008_007748 +2008_007749 +2008_007750 +2008_007752 +2008_007755 +2008_007757 +2008_007758 +2008_007759 +2008_007760 +2008_007761 +2008_007764 +2008_007766 +2008_007768 +2008_007770 +2008_007777 +2008_007779 +2008_007780 +2008_007781 +2008_007786 +2008_007787 +2008_007788 +2008_007789 +2008_007791 +2008_007793 +2008_007794 +2008_007797 +2008_007798 +2008_007804 +2008_007805 +2008_007806 +2008_007811 +2008_007812 +2008_007814 +2008_007816 +2008_007817 +2008_007819 +2008_007823 +2008_007825 +2008_007827 +2008_007828 +2008_007829 +2008_007831 +2008_007833 +2008_007835 +2008_007836 +2008_007837 +2008_007839 +2008_007840 +2008_007841 +2008_007842 +2008_007843 +2008_007848 +2008_007850 +2008_007852 +2008_007853 +2008_007854 +2008_007855 +2008_007858 +2008_007861 +2008_007864 +2008_007869 +2008_007870 +2008_007871 +2008_007872 +2008_007873 +2008_007875 +2008_007877 +2008_007879 +2008_007882 +2008_007883 +2008_007884 +2008_007887 +2008_007888 +2008_007890 +2008_007891 +2008_007893 +2008_007895 +2008_007897 +2008_007902 +2008_007904 +2008_007907 +2008_007909 +2008_007912 +2008_007913 +2008_007914 +2008_007915 +2008_007916 +2008_007917 +2008_007918 +2008_007922 +2008_007923 +2008_007928 +2008_007931 +2008_007932 +2008_007933 +2008_007935 +2008_007936 +2008_007937 +2008_007938 +2008_007940 +2008_007941 +2008_007942 +2008_007945 +2008_007947 +2008_007948 +2008_007949 +2008_007950 +2008_007953 +2008_007954 +2008_007955 +2008_007962 +2008_007964 +2008_007966 +2008_007969 +2008_007970 +2008_007973 +2008_007975 +2008_007977 +2008_007981 +2008_007985 +2008_007986 +2008_007987 +2008_007988 +2008_007989 +2008_007990 +2008_007993 +2008_007994 +2008_007997 +2008_007998 +2008_007999 
+2008_008001 +2008_008002 +2008_008004 +2008_008007 +2008_008011 +2008_008012 +2008_008018 +2008_008020 +2008_008021 +2008_008022 +2008_008024 +2008_008025 +2008_008028 +2008_008029 +2008_008031 +2008_008034 +2008_008037 +2008_008040 +2008_008043 +2008_008044 +2008_008048 +2008_008050 +2008_008052 +2008_008053 +2008_008055 +2008_008057 +2008_008058 +2008_008064 +2008_008066 +2008_008069 +2008_008070 +2008_008072 +2008_008073 +2008_008074 +2008_008075 +2008_008080 +2008_008083 +2008_008084 +2008_008086 +2008_008091 +2008_008092 +2008_008093 +2008_008095 +2008_008096 +2008_008097 +2008_008098 +2008_008103 +2008_008105 +2008_008106 +2008_008109 +2008_008112 +2008_008113 +2008_008115 +2008_008116 +2008_008120 +2008_008121 +2008_008122 +2008_008123 +2008_008125 +2008_008127 +2008_008130 +2008_008131 +2008_008132 +2008_008134 +2008_008141 +2008_008145 +2008_008146 +2008_008147 +2008_008148 +2008_008150 +2008_008152 +2008_008154 +2008_008155 +2008_008162 +2008_008166 +2008_008169 +2008_008170 +2008_008175 +2008_008176 +2008_008177 +2008_008179 +2008_008180 +2008_008184 +2008_008185 +2008_008190 +2008_008191 +2008_008192 +2008_008193 +2008_008194 +2008_008197 +2008_008199 +2008_008200 +2008_008203 +2008_008206 +2008_008208 +2008_008210 +2008_008211 +2008_008212 +2008_008215 +2008_008217 +2008_008218 +2008_008220 +2008_008221 +2008_008223 +2008_008224 +2008_008227 +2008_008229 +2008_008231 +2008_008232 +2008_008233 +2008_008234 +2008_008235 +2008_008237 +2008_008241 +2008_008242 +2008_008246 +2008_008247 +2008_008252 +2008_008254 +2008_008257 +2008_008262 +2008_008263 +2008_008266 +2008_008268 +2008_008269 +2008_008271 +2008_008272 +2008_008274 +2008_008275 +2008_008276 +2008_008278 +2008_008279 +2008_008281 +2008_008284 +2008_008287 +2008_008288 +2008_008292 +2008_008294 +2008_008296 +2008_008297 +2008_008300 +2008_008301 +2008_008302 +2008_008307 +2008_008309 +2008_008310 +2008_008313 +2008_008314 +2008_008315 +2008_008318 +2008_008319 +2008_008320 +2008_008321 +2008_008322 +2008_008323 +2008_008324 +2008_008325 +2008_008330 +2008_008331 +2008_008335 +2008_008336 +2008_008337 +2008_008338 +2008_008341 +2008_008342 +2008_008343 +2008_008344 +2008_008345 +2008_008346 +2008_008347 +2008_008354 +2008_008356 +2008_008357 +2008_008359 +2008_008362 +2008_008363 +2008_008364 +2008_008365 +2008_008366 +2008_008368 +2008_008370 +2008_008373 +2008_008376 +2008_008377 +2008_008379 +2008_008380 +2008_008382 +2008_008384 +2008_008387 +2008_008388 +2008_008391 +2008_008392 +2008_008393 +2008_008395 +2008_008402 +2008_008403 +2008_008404 +2008_008406 +2008_008410 +2008_008411 +2008_008416 +2008_008421 +2008_008423 +2008_008424 +2008_008428 +2008_008429 +2008_008431 +2008_008432 +2008_008433 +2008_008434 +2008_008435 +2008_008437 +2008_008439 +2008_008440 +2008_008443 +2008_008444 +2008_008446 +2008_008447 +2008_008450 +2008_008453 +2008_008455 +2008_008461 +2008_008462 +2008_008464 +2008_008466 +2008_008467 +2008_008469 +2008_008470 +2008_008471 +2008_008474 +2008_008476 +2008_008479 +2008_008480 +2008_008482 +2008_008487 +2008_008488 +2008_008490 +2008_008496 +2008_008497 +2008_008500 +2008_008501 +2008_008506 +2008_008507 +2008_008508 +2008_008511 +2008_008512 +2008_008517 +2008_008519 +2008_008521 +2008_008522 +2008_008523 +2008_008524 +2008_008525 +2008_008526 +2008_008527 +2008_008528 +2008_008530 +2008_008531 +2008_008533 +2008_008536 +2008_008537 +2008_008538 +2008_008541 +2008_008544 +2008_008545 +2008_008546 +2008_008547 +2008_008549 +2008_008550 +2008_008552 +2008_008554 +2008_008560 +2008_008564 
+2008_008567 +2008_008570 +2008_008572 +2008_008574 +2008_008578 +2008_008579 +2008_008583 +2008_008585 +2008_008588 +2008_008589 +2008_008590 +2008_008591 +2008_008593 +2008_008595 +2008_008598 +2008_008600 +2008_008601 +2008_008606 +2008_008607 +2008_008608 +2008_008611 +2008_008613 +2008_008615 +2008_008616 +2008_008617 +2008_008618 +2008_008619 +2008_008621 +2008_008622 +2008_008623 +2008_008624 +2008_008627 +2008_008628 +2008_008629 +2008_008632 +2008_008635 +2008_008636 +2008_008637 +2008_008641 +2008_008642 +2008_008649 +2008_008652 +2008_008654 +2008_008658 +2008_008659 +2008_008662 +2008_008665 +2008_008666 +2008_008668 +2008_008671 +2008_008673 +2008_008674 +2008_008675 +2008_008676 +2008_008679 +2008_008681 +2008_008682 +2008_008683 +2008_008684 +2008_008685 +2008_008689 +2008_008690 +2008_008691 +2008_008694 +2008_008695 +2008_008696 +2008_008697 +2008_008700 +2008_008701 +2008_008705 +2008_008706 +2008_008707 +2008_008708 +2008_008711 +2008_008713 +2008_008714 +2008_008717 +2008_008718 +2008_008719 +2008_008724 +2008_008725 +2008_008726 +2008_008732 +2008_008735 +2008_008739 +2008_008744 +2008_008745 +2008_008746 +2008_008748 +2008_008749 +2008_008751 +2008_008753 +2008_008755 +2008_008757 +2008_008758 +2008_008765 +2008_008767 +2008_008770 +2008_008772 +2008_008773 +2009_000001 +2009_000002 +2009_000006 +2009_000009 +2009_000010 +2009_000011 +2009_000012 +2009_000013 +2009_000014 +2009_000015 +2009_000016 +2009_000017 +2009_000021 +2009_000022 +2009_000026 +2009_000027 +2009_000028 +2009_000029 +2009_000030 +2009_000032 +2009_000035 +2009_000037 +2009_000039 +2009_000040 +2009_000041 +2009_000042 +2009_000045 +2009_000051 +2009_000052 +2009_000054 +2009_000055 +2009_000056 +2009_000058 +2009_000059 +2009_000060 +2009_000063 +2009_000066 +2009_000067 +2009_000068 +2009_000072 +2009_000073 +2009_000074 +2009_000078 +2009_000080 +2009_000082 +2009_000084 +2009_000085 +2009_000087 +2009_000088 +2009_000089 +2009_000090 +2009_000091 +2009_000093 +2009_000096 +2009_000097 +2009_000100 +2009_000102 +2009_000103 +2009_000104 +2009_000105 +2009_000109 +2009_000119 +2009_000120 +2009_000121 +2009_000122 +2009_000124 +2009_000128 +2009_000130 +2009_000131 +2009_000132 +2009_000133 +2009_000135 +2009_000136 +2009_000137 +2009_000140 +2009_000141 +2009_000142 +2009_000145 +2009_000146 +2009_000149 +2009_000150 +2009_000151 +2009_000156 +2009_000157 +2009_000158 +2009_000159 +2009_000160 +2009_000161 +2009_000164 +2009_000165 +2009_000168 +2009_000169 +2009_000171 +2009_000176 +2009_000177 +2009_000181 +2009_000182 +2009_000183 +2009_000184 +2009_000188 +2009_000189 +2009_000192 +2009_000195 +2009_000197 +2009_000198 +2009_000199 +2009_000201 +2009_000203 +2009_000205 +2009_000206 +2009_000209 +2009_000212 +2009_000214 +2009_000216 +2009_000217 +2009_000218 +2009_000219 +2009_000223 +2009_000225 +2009_000227 +2009_000229 +2009_000232 +2009_000233 +2009_000237 +2009_000239 +2009_000242 +2009_000244 +2009_000247 +2009_000248 +2009_000249 +2009_000250 +2009_000251 +2009_000253 +2009_000254 +2009_000257 +2009_000260 +2009_000268 +2009_000276 +2009_000277 +2009_000280 +2009_000281 +2009_000282 +2009_000283 +2009_000284 +2009_000285 +2009_000286 +2009_000287 +2009_000288 +2009_000289 +2009_000290 +2009_000291 +2009_000293 +2009_000297 +2009_000298 +2009_000300 +2009_000303 +2009_000304 +2009_000305 +2009_000308 +2009_000309 +2009_000312 +2009_000316 +2009_000317 +2009_000318 +2009_000320 +2009_000321 +2009_000322 +2009_000327 +2009_000328 +2009_000330 +2009_000335 +2009_000336 +2009_000337 
+2009_000339 +2009_000340 +2009_000341 +2009_000342 +2009_000343 +2009_000344 +2009_000347 +2009_000350 +2009_000351 +2009_000354 +2009_000356 +2009_000366 +2009_000367 +2009_000370 +2009_000375 +2009_000377 +2009_000378 +2009_000379 +2009_000385 +2009_000387 +2009_000389 +2009_000390 +2009_000391 +2009_000393 +2009_000397 +2009_000398 +2009_000399 +2009_000400 +2009_000402 +2009_000405 +2009_000408 +2009_000409 +2009_000410 +2009_000411 +2009_000412 +2009_000414 +2009_000416 +2009_000417 +2009_000418 +2009_000419 +2009_000420 +2009_000421 +2009_000422 +2009_000426 +2009_000430 +2009_000435 +2009_000438 +2009_000439 +2009_000440 +2009_000443 +2009_000444 +2009_000445 +2009_000446 +2009_000449 +2009_000452 +2009_000453 +2009_000454 +2009_000455 +2009_000456 +2009_000457 +2009_000461 +2009_000463 +2009_000464 +2009_000466 +2009_000469 +2009_000471 +2009_000472 +2009_000474 +2009_000476 +2009_000477 +2009_000483 +2009_000486 +2009_000487 +2009_000488 +2009_000491 +2009_000493 +2009_000494 +2009_000496 +2009_000499 +2009_000500 +2009_000501 +2009_000502 +2009_000503 +2009_000504 +2009_000505 +2009_000511 +2009_000512 +2009_000513 +2009_000515 +2009_000516 +2009_000519 +2009_000522 +2009_000523 +2009_000525 +2009_000526 +2009_000527 +2009_000529 +2009_000532 +2009_000535 +2009_000536 +2009_000539 +2009_000542 +2009_000544 +2009_000545 +2009_000546 +2009_000547 +2009_000549 +2009_000550 +2009_000552 +2009_000553 +2009_000557 +2009_000558 +2009_000559 +2009_000560 +2009_000562 +2009_000563 +2009_000565 +2009_000566 +2009_000567 +2009_000568 +2009_000573 +2009_000574 +2009_000575 +2009_000576 +2009_000577 +2009_000579 +2009_000585 +2009_000586 +2009_000590 +2009_000591 +2009_000592 +2009_000593 +2009_000595 +2009_000597 +2009_000599 +2009_000600 +2009_000602 +2009_000603 +2009_000604 +2009_000606 +2009_000608 +2009_000611 +2009_000614 +2009_000615 +2009_000617 +2009_000619 +2009_000624 +2009_000625 +2009_000626 +2009_000628 +2009_000629 +2009_000631 +2009_000632 +2009_000634 +2009_000635 +2009_000636 +2009_000637 +2009_000638 +2009_000641 +2009_000642 +2009_000647 +2009_000648 +2009_000651 +2009_000653 +2009_000655 +2009_000658 +2009_000661 +2009_000662 +2009_000663 +2009_000664 +2009_000670 +2009_000672 +2009_000674 +2009_000675 +2009_000676 +2009_000677 +2009_000679 +2009_000681 +2009_000683 +2009_000684 +2009_000686 +2009_000689 +2009_000690 +2009_000691 +2009_000692 +2009_000694 +2009_000695 +2009_000696 +2009_000702 +2009_000704 +2009_000705 +2009_000708 +2009_000709 +2009_000712 +2009_000716 +2009_000718 +2009_000719 +2009_000720 +2009_000722 +2009_000723 +2009_000724 +2009_000725 +2009_000726 +2009_000727 +2009_000730 +2009_000731 +2009_000732 +2009_000734 +2009_000737 +2009_000741 +2009_000742 +2009_000744 +2009_000745 +2009_000746 +2009_000748 +2009_000750 +2009_000752 +2009_000755 +2009_000756 +2009_000757 +2009_000758 +2009_000759 +2009_000760 +2009_000762 +2009_000763 +2009_000768 +2009_000770 +2009_000771 +2009_000774 +2009_000777 +2009_000778 +2009_000779 +2009_000782 +2009_000783 +2009_000789 +2009_000790 +2009_000791 +2009_000793 +2009_000794 +2009_000796 +2009_000797 +2009_000801 +2009_000804 +2009_000805 +2009_000811 +2009_000812 +2009_000815 +2009_000816 +2009_000817 +2009_000820 +2009_000821 +2009_000823 +2009_000824 +2009_000825 +2009_000828 +2009_000829 +2009_000830 +2009_000831 +2009_000833 +2009_000834 +2009_000837 +2009_000839 +2009_000840 +2009_000843 +2009_000845 +2009_000846 +2009_000848 +2009_000849 +2009_000851 +2009_000852 +2009_000854 +2009_000856 +2009_000858 
+2009_000862 +2009_000865 +2009_000867 +2009_000869 +2009_000871 +2009_000874 +2009_000879 +2009_000882 +2009_000886 +2009_000887 +2009_000889 +2009_000890 +2009_000892 +2009_000894 +2009_000895 +2009_000896 +2009_000897 +2009_000898 +2009_000899 +2009_000901 +2009_000902 +2009_000904 +2009_000906 +2009_000909 +2009_000910 +2009_000915 +2009_000919 +2009_000920 +2009_000923 +2009_000924 +2009_000925 +2009_000926 +2009_000927 +2009_000928 +2009_000930 +2009_000931 +2009_000932 +2009_000934 +2009_000935 +2009_000937 +2009_000938 +2009_000939 +2009_000945 +2009_000948 +2009_000953 +2009_000954 +2009_000955 +2009_000958 +2009_000960 +2009_000961 +2009_000962 +2009_000964 +2009_000966 +2009_000967 +2009_000969 +2009_000970 +2009_000971 +2009_000973 +2009_000974 +2009_000975 +2009_000979 +2009_000980 +2009_000981 +2009_000985 +2009_000987 +2009_000989 +2009_000990 +2009_000991 +2009_000992 +2009_000995 +2009_000996 +2009_000998 +2009_001000 +2009_001002 +2009_001006 +2009_001007 +2009_001008 +2009_001009 +2009_001011 +2009_001012 +2009_001013 +2009_001016 +2009_001019 +2009_001021 +2009_001024 +2009_001026 +2009_001027 +2009_001028 +2009_001030 +2009_001036 +2009_001037 +2009_001038 +2009_001040 +2009_001042 +2009_001044 +2009_001052 +2009_001054 +2009_001055 +2009_001056 +2009_001057 +2009_001059 +2009_001061 +2009_001066 +2009_001068 +2009_001069 +2009_001070 +2009_001074 +2009_001075 +2009_001078 +2009_001079 +2009_001081 +2009_001082 +2009_001083 +2009_001084 +2009_001085 +2009_001090 +2009_001091 +2009_001094 +2009_001095 +2009_001096 +2009_001097 +2009_001098 +2009_001100 +2009_001102 +2009_001103 +2009_001104 +2009_001105 +2009_001106 +2009_001107 +2009_001108 +2009_001110 +2009_001111 +2009_001113 +2009_001117 +2009_001118 +2009_001120 +2009_001121 +2009_001124 +2009_001126 +2009_001128 +2009_001129 +2009_001133 +2009_001134 +2009_001135 +2009_001137 +2009_001138 +2009_001139 +2009_001140 +2009_001145 +2009_001146 +2009_001147 +2009_001148 +2009_001151 +2009_001152 +2009_001153 +2009_001154 +2009_001155 +2009_001159 +2009_001160 +2009_001163 +2009_001164 +2009_001166 +2009_001172 +2009_001177 +2009_001180 +2009_001181 +2009_001184 +2009_001188 +2009_001190 +2009_001192 +2009_001194 +2009_001195 +2009_001196 +2009_001197 +2009_001198 +2009_001199 +2009_001201 +2009_001203 +2009_001205 +2009_001206 +2009_001207 +2009_001208 +2009_001212 +2009_001215 +2009_001216 +2009_001217 +2009_001221 +2009_001224 +2009_001225 +2009_001227 +2009_001229 +2009_001230 +2009_001236 +2009_001237 +2009_001238 +2009_001240 +2009_001241 +2009_001242 +2009_001243 +2009_001245 +2009_001249 +2009_001251 +2009_001252 +2009_001253 +2009_001254 +2009_001255 +2009_001257 +2009_001259 +2009_001260 +2009_001263 +2009_001264 +2009_001266 +2009_001268 +2009_001270 +2009_001271 +2009_001278 +2009_001279 +2009_001282 +2009_001283 +2009_001285 +2009_001286 +2009_001288 +2009_001289 +2009_001291 +2009_001299 +2009_001300 +2009_001301 +2009_001303 +2009_001305 +2009_001306 +2009_001308 +2009_001309 +2009_001311 +2009_001312 +2009_001313 +2009_001314 +2009_001316 +2009_001319 +2009_001320 +2009_001321 +2009_001322 +2009_001323 +2009_001326 +2009_001327 +2009_001328 +2009_001329 +2009_001332 +2009_001333 +2009_001339 +2009_001343 +2009_001344 +2009_001345 +2009_001348 +2009_001349 +2009_001350 +2009_001354 +2009_001355 +2009_001357 +2009_001359 +2009_001360 +2009_001361 +2009_001363 +2009_001364 +2009_001366 +2009_001367 +2009_001368 +2009_001369 +2009_001370 +2009_001371 +2009_001372 +2009_001374 +2009_001375 +2009_001376 
+2009_001384 +2009_001385 +2009_001387 +2009_001388 +2009_001389 +2009_001390 +2009_001391 +2009_001393 +2009_001395 +2009_001397 +2009_001398 +2009_001403 +2009_001406 +2009_001407 +2009_001409 +2009_001411 +2009_001412 +2009_001413 +2009_001414 +2009_001417 +2009_001419 +2009_001422 +2009_001424 +2009_001426 +2009_001427 +2009_001431 +2009_001433 +2009_001434 +2009_001435 +2009_001437 +2009_001440 +2009_001443 +2009_001444 +2009_001446 +2009_001447 +2009_001448 +2009_001449 +2009_001450 +2009_001452 +2009_001453 +2009_001456 +2009_001457 +2009_001462 +2009_001463 +2009_001466 +2009_001468 +2009_001470 +2009_001472 +2009_001474 +2009_001475 +2009_001476 +2009_001479 +2009_001480 +2009_001481 +2009_001484 +2009_001490 +2009_001493 +2009_001494 +2009_001498 +2009_001500 +2009_001501 +2009_001502 +2009_001505 +2009_001507 +2009_001508 +2009_001509 +2009_001514 +2009_001516 +2009_001517 +2009_001518 +2009_001519 +2009_001521 +2009_001522 +2009_001526 +2009_001534 +2009_001535 +2009_001536 +2009_001537 +2009_001538 +2009_001539 +2009_001541 +2009_001542 +2009_001544 +2009_001546 +2009_001549 +2009_001550 +2009_001553 +2009_001554 +2009_001555 +2009_001558 +2009_001562 +2009_001565 +2009_001566 +2009_001567 +2009_001568 +2009_001570 +2009_001575 +2009_001577 +2009_001581 +2009_001585 +2009_001587 +2009_001589 +2009_001590 +2009_001591 +2009_001593 +2009_001594 +2009_001595 +2009_001598 +2009_001602 +2009_001605 +2009_001606 +2009_001607 +2009_001608 +2009_001611 +2009_001612 +2009_001614 +2009_001615 +2009_001617 +2009_001618 +2009_001621 +2009_001623 +2009_001625 +2009_001627 +2009_001631 +2009_001633 +2009_001635 +2009_001636 +2009_001638 +2009_001640 +2009_001642 +2009_001643 +2009_001644 +2009_001645 +2009_001646 +2009_001648 +2009_001651 +2009_001653 +2009_001657 +2009_001660 +2009_001663 +2009_001664 +2009_001667 +2009_001670 +2009_001671 +2009_001673 +2009_001674 +2009_001675 +2009_001676 +2009_001677 +2009_001678 +2009_001682 +2009_001683 +2009_001684 +2009_001687 +2009_001689 +2009_001690 +2009_001693 +2009_001695 +2009_001696 +2009_001699 +2009_001704 +2009_001705 +2009_001706 +2009_001707 +2009_001709 +2009_001713 +2009_001715 +2009_001718 +2009_001719 +2009_001720 +2009_001723 +2009_001724 +2009_001731 +2009_001732 +2009_001733 +2009_001734 +2009_001735 +2009_001738 +2009_001740 +2009_001741 +2009_001743 +2009_001744 +2009_001746 +2009_001747 +2009_001749 +2009_001750 +2009_001751 +2009_001752 +2009_001754 +2009_001755 +2009_001758 +2009_001759 +2009_001764 +2009_001765 +2009_001767 +2009_001768 +2009_001770 +2009_001774 +2009_001775 +2009_001778 +2009_001779 +2009_001780 +2009_001781 +2009_001782 +2009_001783 +2009_001784 +2009_001792 +2009_001794 +2009_001798 +2009_001799 +2009_001800 +2009_001801 +2009_001802 +2009_001804 +2009_001805 +2009_001806 +2009_001807 +2009_001809 +2009_001810 +2009_001811 +2009_001812 +2009_001816 +2009_001817 +2009_001818 +2009_001820 +2009_001822 +2009_001823 +2009_001825 +2009_001826 +2009_001827 +2009_001828 +2009_001830 +2009_001831 +2009_001833 +2009_001835 +2009_001837 +2009_001839 +2009_001840 +2009_001846 +2009_001847 +2009_001848 +2009_001850 +2009_001851 +2009_001852 +2009_001853 +2009_001854 +2009_001856 +2009_001858 +2009_001861 +2009_001864 +2009_001865 +2009_001867 +2009_001868 +2009_001869 +2009_001871 +2009_001873 +2009_001874 +2009_001875 +2009_001881 +2009_001884 +2009_001885 +2009_001888 +2009_001890 +2009_001894 +2009_001897 +2009_001898 +2009_001902 +2009_001904 +2009_001905 +2009_001906 +2009_001907 +2009_001908 +2009_001909 
+2009_001910 +2009_001911 +2009_001915 +2009_001916 +2009_001917 +2009_001922 +2009_001926 +2009_001927 +2009_001929 +2009_001931 +2009_001933 +2009_001934 +2009_001937 +2009_001940 +2009_001941 +2009_001945 +2009_001948 +2009_001949 +2009_001952 +2009_001959 +2009_001960 +2009_001961 +2009_001962 +2009_001964 +2009_001965 +2009_001967 +2009_001971 +2009_001972 +2009_001973 +2009_001975 +2009_001976 +2009_001977 +2009_001979 +2009_001980 +2009_001984 +2009_001988 +2009_001990 +2009_001991 +2009_001994 +2009_001997 +2009_001999 +2009_002000 +2009_002001 +2009_002002 +2009_002003 +2009_002008 +2009_002009 +2009_002010 +2009_002011 +2009_002012 +2009_002018 +2009_002019 +2009_002024 +2009_002031 +2009_002035 +2009_002037 +2009_002039 +2009_002040 +2009_002042 +2009_002044 +2009_002046 +2009_002047 +2009_002052 +2009_002053 +2009_002054 +2009_002055 +2009_002056 +2009_002057 +2009_002058 +2009_002060 +2009_002061 +2009_002064 +2009_002066 +2009_002072 +2009_002073 +2009_002077 +2009_002078 +2009_002082 +2009_002083 +2009_002086 +2009_002087 +2009_002088 +2009_002089 +2009_002093 +2009_002094 +2009_002096 +2009_002097 +2009_002098 +2009_002099 +2009_002103 +2009_002104 +2009_002105 +2009_002107 +2009_002110 +2009_002111 +2009_002112 +2009_002116 +2009_002117 +2009_002118 +2009_002119 +2009_002120 +2009_002122 +2009_002123 +2009_002126 +2009_002127 +2009_002128 +2009_002129 +2009_002131 +2009_002133 +2009_002136 +2009_002137 +2009_002139 +2009_002141 +2009_002144 +2009_002145 +2009_002146 +2009_002147 +2009_002149 +2009_002150 +2009_002151 +2009_002152 +2009_002153 +2009_002155 +2009_002164 +2009_002165 +2009_002169 +2009_002171 +2009_002173 +2009_002175 +2009_002176 +2009_002177 +2009_002180 +2009_002182 +2009_002185 +2009_002191 +2009_002192 +2009_002193 +2009_002194 +2009_002197 +2009_002198 +2009_002199 +2009_002202 +2009_002203 +2009_002204 +2009_002205 +2009_002208 +2009_002211 +2009_002212 +2009_002214 +2009_002215 +2009_002216 +2009_002219 +2009_002221 +2009_002222 +2009_002225 +2009_002226 +2009_002228 +2009_002229 +2009_002230 +2009_002231 +2009_002232 +2009_002235 +2009_002236 +2009_002238 +2009_002239 +2009_002240 +2009_002242 +2009_002245 +2009_002252 +2009_002253 +2009_002254 +2009_002256 +2009_002257 +2009_002258 +2009_002259 +2009_002262 +2009_002264 +2009_002265 +2009_002267 +2009_002268 +2009_002271 +2009_002272 +2009_002273 +2009_002274 +2009_002281 +2009_002282 +2009_002285 +2009_002286 +2009_002289 +2009_002291 +2009_002295 +2009_002297 +2009_002298 +2009_002299 +2009_002301 +2009_002302 +2009_002305 +2009_002306 +2009_002308 +2009_002311 +2009_002312 +2009_002314 +2009_002317 +2009_002319 +2009_002320 +2009_002324 +2009_002325 +2009_002326 +2009_002328 +2009_002331 +2009_002333 +2009_002335 +2009_002338 +2009_002339 +2009_002343 +2009_002346 +2009_002348 +2009_002349 +2009_002350 +2009_002352 +2009_002358 +2009_002360 +2009_002362 +2009_002363 +2009_002366 +2009_002370 +2009_002371 +2009_002372 +2009_002373 +2009_002374 +2009_002376 +2009_002377 +2009_002380 +2009_002381 +2009_002382 +2009_002386 +2009_002387 +2009_002388 +2009_002390 +2009_002391 +2009_002393 +2009_002397 +2009_002398 +2009_002399 +2009_002400 +2009_002401 +2009_002404 +2009_002406 +2009_002407 +2009_002408 +2009_002409 +2009_002414 +2009_002415 +2009_002416 +2009_002419 +2009_002420 +2009_002422 +2009_002423 +2009_002424 +2009_002425 +2009_002429 +2009_002431 +2009_002432 +2009_002433 +2009_002434 +2009_002436 +2009_002438 +2009_002439 +2009_002441 +2009_002443 +2009_002444 +2009_002445 +2009_002448 
+2009_002449 +2009_002452 +2009_002453 +2009_002456 +2009_002457 +2009_002460 +2009_002464 +2009_002465 +2009_002470 +2009_002471 +2009_002472 +2009_002474 +2009_002475 +2009_002476 +2009_002477 +2009_002487 +2009_002488 +2009_002499 +2009_002500 +2009_002504 +2009_002505 +2009_002506 +2009_002510 +2009_002512 +2009_002514 +2009_002515 +2009_002517 +2009_002518 +2009_002519 +2009_002521 +2009_002522 +2009_002523 +2009_002524 +2009_002525 +2009_002527 +2009_002530 +2009_002531 +2009_002532 +2009_002535 +2009_002536 +2009_002537 +2009_002539 +2009_002542 +2009_002543 +2009_002546 +2009_002549 +2009_002552 +2009_002553 +2009_002556 +2009_002557 +2009_002558 +2009_002559 +2009_002561 +2009_002562 +2009_002563 +2009_002565 +2009_002566 +2009_002567 +2009_002568 +2009_002569 +2009_002570 +2009_002571 +2009_002573 +2009_002577 +2009_002579 +2009_002580 +2009_002584 +2009_002585 +2009_002586 +2009_002588 +2009_002591 +2009_002592 +2009_002594 +2009_002595 +2009_002597 +2009_002599 +2009_002604 +2009_002605 +2009_002607 +2009_002608 +2009_002609 +2009_002611 +2009_002612 +2009_002613 +2009_002614 +2009_002615 +2009_002616 +2009_002618 +2009_002620 +2009_002621 +2009_002624 +2009_002625 +2009_002626 +2009_002628 +2009_002629 +2009_002632 +2009_002634 +2009_002635 +2009_002638 +2009_002645 +2009_002648 +2009_002649 +2009_002651 +2009_002652 +2009_002659 +2009_002662 +2009_002663 +2009_002665 +2009_002667 +2009_002668 +2009_002669 +2009_002670 +2009_002671 +2009_002672 +2009_002673 +2009_002674 +2009_002675 +2009_002676 +2009_002680 +2009_002681 +2009_002683 +2009_002684 +2009_002685 +2009_002687 +2009_002688 +2009_002689 +2009_002695 +2009_002697 +2009_002698 +2009_002703 +2009_002704 +2009_002705 +2009_002708 +2009_002710 +2009_002711 +2009_002712 +2009_002713 +2009_002714 +2009_002715 +2009_002717 +2009_002719 +2009_002725 +2009_002727 +2009_002728 +2009_002732 +2009_002733 +2009_002734 +2009_002739 +2009_002741 +2009_002743 +2009_002744 +2009_002746 +2009_002749 +2009_002750 +2009_002752 +2009_002753 +2009_002754 +2009_002755 +2009_002758 +2009_002759 +2009_002762 +2009_002763 +2009_002764 +2009_002765 +2009_002770 +2009_002771 +2009_002772 +2009_002774 +2009_002777 +2009_002778 +2009_002779 +2009_002780 +2009_002784 +2009_002785 +2009_002789 +2009_002790 +2009_002791 +2009_002792 +2009_002798 +2009_002799 +2009_002800 +2009_002803 +2009_002806 +2009_002807 +2009_002808 +2009_002809 +2009_002813 +2009_002814 +2009_002816 +2009_002817 +2009_002820 +2009_002824 +2009_002827 +2009_002830 +2009_002831 +2009_002833 +2009_002835 +2009_002836 +2009_002837 +2009_002838 +2009_002841 +2009_002842 +2009_002843 +2009_002844 +2009_002845 +2009_002847 +2009_002849 +2009_002850 +2009_002851 +2009_002853 +2009_002855 +2009_002856 +2009_002862 +2009_002865 +2009_002867 +2009_002869 +2009_002872 +2009_002876 +2009_002877 +2009_002879 +2009_002882 +2009_002883 +2009_002885 +2009_002887 +2009_002888 +2009_002890 +2009_002893 +2009_002894 +2009_002897 +2009_002898 +2009_002901 +2009_002902 +2009_002908 +2009_002910 +2009_002912 +2009_002914 +2009_002917 +2009_002918 +2009_002920 +2009_002921 +2009_002925 +2009_002928 +2009_002932 +2009_002933 +2009_002935 +2009_002936 +2009_002937 +2009_002938 +2009_002940 +2009_002941 +2009_002946 +2009_002947 +2009_002952 +2009_002954 +2009_002955 +2009_002957 +2009_002958 +2009_002960 +2009_002961 +2009_002962 +2009_002967 +2009_002970 +2009_002971 +2009_002972 +2009_002975 +2009_002976 +2009_002977 +2009_002978 +2009_002980 +2009_002982 +2009_002983 +2009_002984 +2009_002985 
+2009_002986 +2009_002988 +2009_002990 +2009_002993 +2009_002995 +2009_002998 +2009_002999 +2009_003000 +2009_003002 +2009_003003 +2009_003005 +2009_003006 +2009_003007 +2009_003010 +2009_003012 +2009_003013 +2009_003018 +2009_003019 +2009_003020 +2009_003022 +2009_003023 +2009_003031 +2009_003032 +2009_003033 +2009_003034 +2009_003035 +2009_003039 +2009_003042 +2009_003043 +2009_003044 +2009_003052 +2009_003053 +2009_003054 +2009_003056 +2009_003058 +2009_003059 +2009_003063 +2009_003064 +2009_003065 +2009_003066 +2009_003067 +2009_003068 +2009_003070 +2009_003071 +2009_003074 +2009_003075 +2009_003076 +2009_003077 +2009_003078 +2009_003080 +2009_003082 +2009_003083 +2009_003084 +2009_003087 +2009_003088 +2009_003089 +2009_003090 +2009_003091 +2009_003093 +2009_003095 +2009_003097 +2009_003098 +2009_003105 +2009_003107 +2009_003108 +2009_003109 +2009_003110 +2009_003114 +2009_003115 +2009_003116 +2009_003118 +2009_003122 +2009_003123 +2009_003125 +2009_003126 +2009_003127 +2009_003128 +2009_003129 +2009_003130 +2009_003132 +2009_003136 +2009_003138 +2009_003140 +2009_003142 +2009_003143 +2009_003144 +2009_003146 +2009_003147 +2009_003150 +2009_003151 +2009_003153 +2009_003154 +2009_003155 +2009_003156 +2009_003157 +2009_003164 +2009_003165 +2009_003166 +2009_003168 +2009_003172 +2009_003173 +2009_003175 +2009_003181 +2009_003183 +2009_003185 +2009_003187 +2009_003189 +2009_003191 +2009_003193 +2009_003194 +2009_003196 +2009_003198 +2009_003199 +2009_003200 +2009_003201 +2009_003204 +2009_003208 +2009_003209 +2009_003212 +2009_003214 +2009_003217 +2009_003218 +2009_003219 +2009_003222 +2009_003224 +2009_003225 +2009_003229 +2009_003230 +2009_003232 +2009_003233 +2009_003234 +2009_003238 +2009_003241 +2009_003247 +2009_003249 +2009_003251 +2009_003253 +2009_003254 +2009_003255 +2009_003257 +2009_003259 +2009_003261 +2009_003262 +2009_003265 +2009_003266 +2009_003267 +2009_003269 +2009_003271 +2009_003272 +2009_003273 +2009_003276 +2009_003277 +2009_003278 +2009_003282 +2009_003284 +2009_003285 +2009_003288 +2009_003290 +2009_003294 +2009_003297 +2009_003299 +2009_003300 +2009_003301 +2009_003304 +2009_003305 +2009_003309 +2009_003310 +2009_003311 +2009_003312 +2009_003315 +2009_003316 +2009_003317 +2009_003320 +2009_003323 +2009_003326 +2009_003327 +2009_003333 +2009_003338 +2009_003340 +2009_003343 +2009_003345 +2009_003346 +2009_003347 +2009_003348 +2009_003349 +2009_003350 +2009_003351 +2009_003352 +2009_003353 +2009_003360 +2009_003361 +2009_003363 +2009_003365 +2009_003367 +2009_003369 +2009_003372 +2009_003373 +2009_003375 +2009_003376 +2009_003377 +2009_003378 +2009_003379 +2009_003380 +2009_003381 +2009_003383 +2009_003384 +2009_003385 +2009_003386 +2009_003387 +2009_003394 +2009_003395 +2009_003396 +2009_003399 +2009_003400 +2009_003402 +2009_003406 +2009_003407 +2009_003409 +2009_003411 +2009_003415 +2009_003416 +2009_003417 +2009_003419 +2009_003422 +2009_003425 +2009_003430 +2009_003431 +2009_003433 +2009_003436 +2009_003440 +2009_003441 +2009_003443 +2009_003445 +2009_003446 +2009_003447 +2009_003450 +2009_003453 +2009_003454 +2009_003455 +2009_003456 +2009_003457 +2009_003458 +2009_003459 +2009_003460 +2009_003461 +2009_003462 +2009_003466 +2009_003467 +2009_003468 +2009_003469 +2009_003476 +2009_003481 +2009_003482 +2009_003487 +2009_003488 +2009_003489 +2009_003490 +2009_003491 +2009_003492 +2009_003494 +2009_003497 +2009_003498 +2009_003499 +2009_003500 +2009_003504 +2009_003507 +2009_003508 +2009_003509 +2009_003510 +2009_003511 +2009_003513 +2009_003517 +2009_003519 
+2009_003520 +2009_003521 +2009_003522 +2009_003523 +2009_003524 +2009_003528 +2009_003530 +2009_003531 +2009_003533 +2009_003534 +2009_003537 +2009_003538 +2009_003539 +2009_003540 +2009_003541 +2009_003542 +2009_003543 +2009_003544 +2009_003545 +2009_003546 +2009_003549 +2009_003551 +2009_003554 +2009_003555 +2009_003560 +2009_003562 +2009_003563 +2009_003564 +2009_003565 +2009_003566 +2009_003569 +2009_003571 +2009_003572 +2009_003576 +2009_003577 +2009_003581 +2009_003583 +2009_003588 +2009_003589 +2009_003592 +2009_003594 +2009_003598 +2009_003600 +2009_003601 +2009_003605 +2009_003606 +2009_003607 +2009_003608 +2009_003609 +2009_003612 +2009_003613 +2009_003614 +2009_003618 +2009_003624 +2009_003626 +2009_003627 +2009_003629 +2009_003633 +2009_003634 +2009_003635 +2009_003636 +2009_003637 +2009_003638 +2009_003639 +2009_003640 +2009_003642 +2009_003644 +2009_003646 +2009_003647 +2009_003650 +2009_003652 +2009_003654 +2009_003655 +2009_003656 +2009_003657 +2009_003660 +2009_003663 +2009_003664 +2009_003666 +2009_003667 +2009_003668 +2009_003669 +2009_003671 +2009_003677 +2009_003679 +2009_003683 +2009_003685 +2009_003686 +2009_003688 +2009_003689 +2009_003690 +2009_003694 +2009_003695 +2009_003696 +2009_003697 +2009_003698 +2009_003702 +2009_003703 +2009_003704 +2009_003705 +2009_003707 +2009_003708 +2009_003709 +2009_003710 +2009_003711 +2009_003713 +2009_003714 +2009_003717 +2009_003718 +2009_003720 +2009_003722 +2009_003725 +2009_003726 +2009_003732 +2009_003734 +2009_003735 +2009_003736 +2009_003738 +2009_003739 +2009_003743 +2009_003747 +2009_003751 +2009_003752 +2009_003753 +2009_003756 +2009_003757 +2009_003758 +2009_003759 +2009_003760 +2009_003765 +2009_003768 +2009_003771 +2009_003773 +2009_003775 +2009_003776 +2009_003781 +2009_003783 +2009_003784 +2009_003785 +2009_003786 +2009_003790 +2009_003793 +2009_003795 +2009_003799 +2009_003800 +2009_003801 +2009_003802 +2009_003804 +2009_003805 +2009_003806 +2009_003808 +2009_003810 +2009_003813 +2009_003814 +2009_003815 +2009_003816 +2009_003818 +2009_003819 +2009_003820 +2009_003821 +2009_003822 +2009_003825 +2009_003827 +2009_003829 +2009_003832 +2009_003835 +2009_003836 +2009_003837 +2009_003838 +2009_003840 +2009_003843 +2009_003846 +2009_003847 +2009_003848 +2009_003849 +2009_003852 +2009_003855 +2009_003857 +2009_003858 +2009_003860 +2009_003863 +2009_003865 +2009_003867 +2009_003870 +2009_003873 +2009_003874 +2009_003879 +2009_003883 +2009_003884 +2009_003888 +2009_003892 +2009_003895 +2009_003896 +2009_003897 +2009_003899 +2009_003900 +2009_003901 +2009_003902 +2009_003903 +2009_003904 +2009_003905 +2009_003908 +2009_003911 +2009_003912 +2009_003913 +2009_003914 +2009_003916 +2009_003920 +2009_003921 +2009_003922 +2009_003928 +2009_003929 +2009_003933 +2009_003936 +2009_003938 +2009_003942 +2009_003944 +2009_003947 +2009_003950 +2009_003951 +2009_003955 +2009_003956 +2009_003958 +2009_003961 +2009_003962 +2009_003965 +2009_003966 +2009_003969 +2009_003971 +2009_003973 +2009_003974 +2009_003975 +2009_003976 +2009_003977 +2009_003982 +2009_003985 +2009_003986 +2009_003991 +2009_003992 +2009_003993 +2009_003994 +2009_003995 +2009_004001 +2009_004002 +2009_004004 +2009_004005 +2009_004007 +2009_004012 +2009_004016 +2009_004018 +2009_004019 +2009_004020 +2009_004021 +2009_004022 +2009_004023 +2009_004025 +2009_004031 +2009_004032 +2009_004033 +2009_004034 +2009_004037 +2009_004038 +2009_004040 +2009_004042 +2009_004043 +2009_004044 +2009_004050 +2009_004051 +2009_004052 +2009_004055 +2009_004058 +2009_004062 +2009_004069 
+2009_004070 +2009_004072 +2009_004073 +2009_004074 +2009_004075 +2009_004076 +2009_004078 +2009_004082 +2009_004083 +2009_004084 +2009_004085 +2009_004088 +2009_004091 +2009_004092 +2009_004093 +2009_004094 +2009_004095 +2009_004096 +2009_004099 +2009_004100 +2009_004102 +2009_004103 +2009_004105 +2009_004108 +2009_004109 +2009_004111 +2009_004112 +2009_004113 +2009_004117 +2009_004118 +2009_004121 +2009_004122 +2009_004124 +2009_004125 +2009_004126 +2009_004128 +2009_004129 +2009_004131 +2009_004133 +2009_004134 +2009_004138 +2009_004139 +2009_004140 +2009_004141 +2009_004142 +2009_004148 +2009_004150 +2009_004152 +2009_004153 +2009_004154 +2009_004157 +2009_004159 +2009_004161 +2009_004162 +2009_004163 +2009_004164 +2009_004165 +2009_004166 +2009_004168 +2009_004169 +2009_004170 +2009_004171 +2009_004173 +2009_004174 +2009_004175 +2009_004176 +2009_004177 +2009_004178 +2009_004179 +2009_004180 +2009_004181 +2009_004183 +2009_004186 +2009_004187 +2009_004188 +2009_004191 +2009_004193 +2009_004197 +2009_004199 +2009_004200 +2009_004201 +2009_004202 +2009_004203 +2009_004205 +2009_004207 +2009_004210 +2009_004211 +2009_004212 +2009_004213 +2009_004217 +2009_004218 +2009_004221 +2009_004222 +2009_004224 +2009_004225 +2009_004227 +2009_004228 +2009_004229 +2009_004231 +2009_004232 +2009_004233 +2009_004234 +2009_004241 +2009_004242 +2009_004243 +2009_004244 +2009_004247 +2009_004248 +2009_004249 +2009_004255 +2009_004258 +2009_004261 +2009_004262 +2009_004263 +2009_004264 +2009_004271 +2009_004272 +2009_004273 +2009_004274 +2009_004276 +2009_004277 +2009_004278 +2009_004279 +2009_004283 +2009_004284 +2009_004285 +2009_004289 +2009_004290 +2009_004291 +2009_004295 +2009_004298 +2009_004300 +2009_004301 +2009_004303 +2009_004307 +2009_004308 +2009_004309 +2009_004312 +2009_004315 +2009_004316 +2009_004317 +2009_004319 +2009_004322 +2009_004323 +2009_004324 +2009_004327 +2009_004328 +2009_004329 +2009_004332 +2009_004334 +2009_004336 +2009_004338 +2009_004340 +2009_004341 +2009_004346 +2009_004347 +2009_004350 +2009_004351 +2009_004357 +2009_004358 +2009_004359 +2009_004361 +2009_004364 +2009_004366 +2009_004368 +2009_004369 +2009_004370 +2009_004371 +2009_004374 +2009_004375 +2009_004377 +2009_004382 +2009_004383 +2009_004390 +2009_004392 +2009_004394 +2009_004397 +2009_004399 +2009_004402 +2009_004403 +2009_004404 +2009_004406 +2009_004409 +2009_004410 +2009_004411 +2009_004414 +2009_004417 +2009_004419 +2009_004424 +2009_004425 +2009_004426 +2009_004429 +2009_004432 +2009_004434 +2009_004435 +2009_004436 +2009_004438 +2009_004440 +2009_004442 +2009_004444 +2009_004445 +2009_004446 +2009_004448 +2009_004449 +2009_004451 +2009_004452 +2009_004453 +2009_004454 +2009_004455 +2009_004456 +2009_004457 +2009_004464 +2009_004465 +2009_004468 +2009_004471 +2009_004475 +2009_004477 +2009_004478 +2009_004479 +2009_004483 +2009_004486 +2009_004492 +2009_004494 +2009_004496 +2009_004497 +2009_004499 +2009_004501 +2009_004502 +2009_004503 +2009_004504 +2009_004507 +2009_004508 +2009_004509 +2009_004511 +2009_004513 +2009_004514 +2009_004518 +2009_004519 +2009_004524 +2009_004525 +2009_004527 +2009_004529 +2009_004530 +2009_004532 +2009_004535 +2009_004536 +2009_004537 +2009_004539 +2009_004540 +2009_004542 +2009_004543 +2009_004545 +2009_004547 +2009_004548 +2009_004551 +2009_004552 +2009_004554 +2009_004556 +2009_004557 +2009_004559 +2009_004560 +2009_004561 +2009_004562 +2009_004565 +2009_004567 +2009_004568 +2009_004570 +2009_004571 +2009_004572 +2009_004579 +2009_004580 +2009_004581 +2009_004582 
+2009_004587 +2009_004588 +2009_004590 +2009_004592 +2009_004593 +2009_004594 +2009_004598 +2009_004601 +2009_004606 +2009_004607 +2009_004614 +2009_004616 +2009_004619 +2009_004620 +2009_004623 +2009_004624 +2009_004625 +2009_004626 +2009_004628 +2009_004629 +2009_004630 +2009_004631 +2009_004634 +2009_004635 +2009_004639 +2009_004642 +2009_004643 +2009_004645 +2009_004647 +2009_004648 +2009_004651 +2009_004652 +2009_004653 +2009_004655 +2009_004656 +2009_004661 +2009_004662 +2009_004664 +2009_004667 +2009_004669 +2009_004670 +2009_004671 +2009_004674 +2009_004677 +2009_004679 +2009_004681 +2009_004683 +2009_004684 +2009_004686 +2009_004687 +2009_004688 +2009_004694 +2009_004697 +2009_004701 +2009_004705 +2009_004706 +2009_004708 +2009_004709 +2009_004710 +2009_004713 +2009_004716 +2009_004718 +2009_004719 +2009_004720 +2009_004721 +2009_004723 +2009_004728 +2009_004730 +2009_004731 +2009_004732 +2009_004734 +2009_004737 +2009_004738 +2009_004744 +2009_004745 +2009_004746 +2009_004748 +2009_004749 +2009_004754 +2009_004756 +2009_004758 +2009_004759 +2009_004760 +2009_004761 +2009_004763 +2009_004764 +2009_004765 +2009_004766 +2009_004768 +2009_004769 +2009_004771 +2009_004772 +2009_004779 +2009_004780 +2009_004781 +2009_004782 +2009_004784 +2009_004786 +2009_004787 +2009_004789 +2009_004790 +2009_004794 +2009_004796 +2009_004797 +2009_004798 +2009_004799 +2009_004801 +2009_004804 +2009_004805 +2009_004806 +2009_004812 +2009_004813 +2009_004815 +2009_004817 +2009_004820 +2009_004822 +2009_004823 +2009_004824 +2009_004828 +2009_004829 +2009_004830 +2009_004831 +2009_004834 +2009_004836 +2009_004839 +2009_004841 +2009_004845 +2009_004846 +2009_004847 +2009_004848 +2009_004849 +2009_004855 +2009_004856 +2009_004857 +2009_004858 +2009_004859 +2009_004865 +2009_004867 +2009_004868 +2009_004869 +2009_004871 +2009_004872 +2009_004874 +2009_004876 +2009_004877 +2009_004880 +2009_004882 +2009_004885 +2009_004886 +2009_004887 +2009_004888 +2009_004889 +2009_004890 +2009_004895 +2009_004897 +2009_004898 +2009_004899 +2009_004901 +2009_004902 +2009_004903 +2009_004904 +2009_004905 +2009_004907 +2009_004913 +2009_004914 +2009_004917 +2009_004919 +2009_004921 +2009_004922 +2009_004926 +2009_004929 +2009_004930 +2009_004933 +2009_004934 +2009_004939 +2009_004940 +2009_004942 +2009_004943 +2009_004944 +2009_004945 +2009_004946 +2009_004947 +2009_004953 +2009_004956 +2009_004958 +2009_004959 +2009_004961 +2009_004962 +2009_004965 +2009_004969 +2009_004971 +2009_004972 +2009_004974 +2009_004975 +2009_004977 +2009_004979 +2009_004980 +2009_004982 +2009_004983 +2009_004984 +2009_004986 +2009_004987 +2009_004988 +2009_004990 +2009_004993 +2009_004994 +2009_004996 +2009_004999 +2009_005000 +2009_005001 +2009_005005 +2009_005006 +2009_005008 +2009_005015 +2009_005016 +2009_005019 +2009_005024 +2009_005025 +2009_005030 +2009_005031 +2009_005033 +2009_005035 +2009_005036 +2009_005037 +2009_005038 +2009_005040 +2009_005042 +2009_005044 +2009_005045 +2009_005051 +2009_005055 +2009_005056 +2009_005057 +2009_005060 +2009_005061 +2009_005062 +2009_005064 +2009_005068 +2009_005069 +2009_005070 +2009_005073 +2009_005075 +2009_005076 +2009_005078 +2009_005079 +2009_005080 +2009_005081 +2009_005082 +2009_005083 +2009_005084 +2009_005085 +2009_005086 +2009_005087 +2009_005089 +2009_005094 +2009_005095 +2009_005098 +2009_005102 +2009_005103 +2009_005104 +2009_005107 +2009_005111 +2009_005114 +2009_005118 +2009_005119 +2009_005120 +2009_005126 +2009_005127 +2009_005128 +2009_005129 +2009_005130 +2009_005131 +2009_005133 
+2009_005137 +2009_005140 +2009_005141 +2009_005142 +2009_005144 +2009_005145 +2009_005147 +2009_005148 +2009_005149 +2009_005150 +2009_005152 +2009_005153 +2009_005154 +2009_005155 +2009_005156 +2009_005158 +2009_005160 +2009_005161 +2009_005162 +2009_005163 +2009_005165 +2009_005168 +2009_005170 +2009_005171 +2009_005172 +2009_005177 +2009_005178 +2009_005181 +2009_005183 +2009_005185 +2009_005189 +2009_005190 +2009_005191 +2009_005193 +2009_005194 +2009_005198 +2009_005201 +2009_005202 +2009_005203 +2009_005204 +2009_005205 +2009_005210 +2009_005211 +2009_005215 +2009_005216 +2009_005217 +2009_005218 +2009_005219 +2009_005220 +2009_005221 +2009_005222 +2009_005225 +2009_005229 +2009_005231 +2009_005232 +2009_005234 +2009_005236 +2009_005239 +2009_005240 +2009_005242 +2009_005246 +2009_005247 +2009_005251 +2009_005256 +2009_005257 +2009_005260 +2009_005262 +2009_005263 +2009_005265 +2009_005267 +2009_005268 +2009_005269 +2009_005272 +2009_005278 +2009_005279 +2009_005282 +2009_005286 +2009_005287 +2009_005288 +2009_005292 +2009_005293 +2009_005294 +2009_005297 +2009_005299 +2009_005300 +2009_005302 +2009_005303 +2009_005307 +2009_005308 +2009_005309 +2009_005310 +2009_005311 +2010_000001 +2010_000002 +2010_000003 +2010_000009 +2010_000014 +2010_000015 +2010_000018 +2010_000020 +2010_000023 +2010_000024 +2010_000026 +2010_000027 +2010_000031 +2010_000033 +2010_000035 +2010_000036 +2010_000038 +2010_000043 +2010_000045 +2010_000048 +2010_000050 +2010_000052 +2010_000053 +2010_000054 +2010_000055 +2010_000056 +2010_000061 +2010_000063 +2010_000065 +2010_000067 +2010_000069 +2010_000071 +2010_000072 +2010_000073 +2010_000074 +2010_000075 +2010_000076 +2010_000079 +2010_000080 +2010_000082 +2010_000083 +2010_000084 +2010_000085 +2010_000087 +2010_000088 +2010_000089 +2010_000090 +2010_000091 +2010_000095 +2010_000097 +2010_000098 +2010_000099 +2010_000103 +2010_000109 +2010_000110 +2010_000111 +2010_000113 +2010_000114 +2010_000117 +2010_000118 +2010_000120 +2010_000124 +2010_000127 +2010_000131 +2010_000132 +2010_000133 +2010_000136 +2010_000137 +2010_000138 +2010_000139 +2010_000140 +2010_000141 +2010_000145 +2010_000148 +2010_000151 +2010_000152 +2010_000157 +2010_000159 +2010_000160 +2010_000162 +2010_000163 +2010_000165 +2010_000169 +2010_000170 +2010_000172 +2010_000174 +2010_000175 +2010_000177 +2010_000178 +2010_000182 +2010_000183 +2010_000184 +2010_000187 +2010_000189 +2010_000190 +2010_000193 +2010_000194 +2010_000195 +2010_000196 +2010_000197 +2010_000198 +2010_000199 +2010_000202 +2010_000203 +2010_000204 +2010_000209 +2010_000211 +2010_000213 +2010_000216 +2010_000218 +2010_000222 +2010_000224 +2010_000227 +2010_000229 +2010_000233 +2010_000234 +2010_000238 +2010_000241 +2010_000244 +2010_000245 +2010_000246 +2010_000247 +2010_000248 +2010_000249 +2010_000250 +2010_000254 +2010_000255 +2010_000256 +2010_000260 +2010_000261 +2010_000262 +2010_000263 +2010_000264 +2010_000266 +2010_000269 +2010_000270 +2010_000272 +2010_000273 +2010_000276 +2010_000279 +2010_000283 +2010_000284 +2010_000285 +2010_000286 +2010_000291 +2010_000293 +2010_000295 +2010_000296 +2010_000299 +2010_000302 +2010_000303 +2010_000307 +2010_000308 +2010_000309 +2010_000310 +2010_000312 +2010_000313 +2010_000317 +2010_000318 +2010_000320 +2010_000321 +2010_000323 +2010_000324 +2010_000325 +2010_000327 +2010_000329 +2010_000330 +2010_000335 +2010_000336 +2010_000337 +2010_000342 +2010_000344 +2010_000347 +2010_000349 +2010_000351 +2010_000352 +2010_000356 +2010_000358 +2010_000361 +2010_000362 +2010_000370 
+2010_000371 +2010_000372 +2010_000374 +2010_000375 +2010_000376 +2010_000377 +2010_000379 +2010_000381 +2010_000382 +2010_000384 +2010_000386 +2010_000388 +2010_000389 +2010_000390 +2010_000392 +2010_000393 +2010_000394 +2010_000395 +2010_000399 +2010_000401 +2010_000404 +2010_000406 +2010_000409 +2010_000413 +2010_000415 +2010_000418 +2010_000419 +2010_000420 +2010_000422 +2010_000426 +2010_000427 +2010_000431 +2010_000432 +2010_000433 +2010_000435 +2010_000436 +2010_000437 +2010_000439 +2010_000442 +2010_000444 +2010_000446 +2010_000447 +2010_000448 +2010_000449 +2010_000453 +2010_000456 +2010_000458 +2010_000459 +2010_000461 +2010_000462 +2010_000463 +2010_000465 +2010_000466 +2010_000468 +2010_000469 +2010_000470 +2010_000473 +2010_000474 +2010_000475 +2010_000477 +2010_000480 +2010_000483 +2010_000484 +2010_000485 +2010_000488 +2010_000490 +2010_000492 +2010_000493 +2010_000495 +2010_000497 +2010_000498 +2010_000500 +2010_000502 +2010_000503 +2010_000506 +2010_000508 +2010_000510 +2010_000511 +2010_000513 +2010_000515 +2010_000519 +2010_000522 +2010_000524 +2010_000526 +2010_000527 +2010_000530 +2010_000534 +2010_000536 +2010_000537 +2010_000538 +2010_000541 +2010_000545 +2010_000547 +2010_000548 +2010_000549 +2010_000552 +2010_000553 +2010_000556 +2010_000557 +2010_000559 +2010_000561 +2010_000562 +2010_000564 +2010_000567 +2010_000568 +2010_000571 +2010_000572 +2010_000573 +2010_000574 +2010_000576 +2010_000577 +2010_000578 +2010_000581 +2010_000582 +2010_000583 +2010_000586 +2010_000588 +2010_000590 +2010_000591 +2010_000601 +2010_000602 +2010_000603 +2010_000604 +2010_000608 +2010_000613 +2010_000616 +2010_000617 +2010_000621 +2010_000622 +2010_000624 +2010_000626 +2010_000628 +2010_000630 +2010_000632 +2010_000633 +2010_000635 +2010_000639 +2010_000641 +2010_000644 +2010_000645 +2010_000646 +2010_000647 +2010_000648 +2010_000651 +2010_000655 +2010_000658 +2010_000661 +2010_000664 +2010_000665 +2010_000666 +2010_000667 +2010_000669 +2010_000671 +2010_000674 +2010_000675 +2010_000678 +2010_000679 +2010_000681 +2010_000682 +2010_000683 +2010_000685 +2010_000687 +2010_000688 +2010_000689 +2010_000690 +2010_000691 +2010_000692 +2010_000694 +2010_000695 +2010_000697 +2010_000702 +2010_000705 +2010_000707 +2010_000710 +2010_000711 +2010_000712 +2010_000715 +2010_000716 +2010_000717 +2010_000721 +2010_000722 +2010_000723 +2010_000724 +2010_000726 +2010_000727 +2010_000729 +2010_000731 +2010_000735 +2010_000737 +2010_000738 +2010_000739 +2010_000740 +2010_000743 +2010_000744 +2010_000746 +2010_000747 +2010_000748 +2010_000749 +2010_000750 +2010_000754 +2010_000759 +2010_000760 +2010_000761 +2010_000764 +2010_000765 +2010_000769 +2010_000770 +2010_000771 +2010_000772 +2010_000773 +2010_000778 +2010_000782 +2010_000785 +2010_000786 +2010_000787 +2010_000788 +2010_000791 +2010_000792 +2010_000797 +2010_000799 +2010_000800 +2010_000802 +2010_000803 +2010_000805 +2010_000806 +2010_000807 +2010_000808 +2010_000810 +2010_000811 +2010_000814 +2010_000815 +2010_000821 +2010_000822 +2010_000827 +2010_000828 +2010_000829 +2010_000830 +2010_000831 +2010_000836 +2010_000837 +2010_000838 +2010_000842 +2010_000846 +2010_000847 +2010_000849 +2010_000855 +2010_000857 +2010_000860 +2010_000862 +2010_000863 +2010_000865 +2010_000866 +2010_000870 +2010_000871 +2010_000872 +2010_000874 +2010_000875 +2010_000876 +2010_000879 +2010_000883 +2010_000885 +2010_000887 +2010_000889 +2010_000891 +2010_000893 +2010_000897 +2010_000898 +2010_000899 +2010_000904 +2010_000906 +2010_000907 +2010_000908 +2010_000910 
+2010_000912 +2010_000914 +2010_000915 +2010_000918 +2010_000920 +2010_000922 +2010_000923 +2010_000926 +2010_000927 +2010_000928 +2010_000929 +2010_000931 +2010_000938 +2010_000939 +2010_000941 +2010_000942 +2010_000944 +2010_000945 +2010_000947 +2010_000948 +2010_000952 +2010_000954 +2010_000955 +2010_000956 +2010_000959 +2010_000961 +2010_000968 +2010_000970 +2010_000971 +2010_000973 +2010_000974 +2010_000975 +2010_000978 +2010_000979 +2010_000981 +2010_000983 +2010_000984 +2010_000986 +2010_000989 +2010_000991 +2010_000993 +2010_000994 +2010_000995 +2010_000996 +2010_001000 +2010_001002 +2010_001006 +2010_001008 +2010_001009 +2010_001010 +2010_001011 +2010_001012 +2010_001013 +2010_001016 +2010_001017 +2010_001020 +2010_001021 +2010_001023 +2010_001024 +2010_001025 +2010_001030 +2010_001032 +2010_001036 +2010_001039 +2010_001042 +2010_001043 +2010_001044 +2010_001049 +2010_001051 +2010_001052 +2010_001054 +2010_001057 +2010_001061 +2010_001063 +2010_001066 +2010_001069 +2010_001070 +2010_001074 +2010_001076 +2010_001077 +2010_001079 +2010_001080 +2010_001082 +2010_001085 +2010_001087 +2010_001089 +2010_001092 +2010_001094 +2010_001098 +2010_001099 +2010_001100 +2010_001103 +2010_001104 +2010_001105 +2010_001106 +2010_001107 +2010_001109 +2010_001110 +2010_001111 +2010_001112 +2010_001113 +2010_001117 +2010_001118 +2010_001119 +2010_001120 +2010_001121 +2010_001123 +2010_001124 +2010_001125 +2010_001126 +2010_001127 +2010_001130 +2010_001131 +2010_001134 +2010_001139 +2010_001140 +2010_001142 +2010_001143 +2010_001147 +2010_001148 +2010_001149 +2010_001151 +2010_001152 +2010_001154 +2010_001158 +2010_001159 +2010_001160 +2010_001163 +2010_001164 +2010_001172 +2010_001174 +2010_001175 +2010_001177 +2010_001179 +2010_001181 +2010_001183 +2010_001184 +2010_001185 +2010_001188 +2010_001189 +2010_001192 +2010_001193 +2010_001195 +2010_001199 +2010_001201 +2010_001204 +2010_001205 +2010_001206 +2010_001210 +2010_001211 +2010_001212 +2010_001214 +2010_001215 +2010_001216 +2010_001218 +2010_001219 +2010_001220 +2010_001224 +2010_001225 +2010_001229 +2010_001234 +2010_001237 +2010_001240 +2010_001241 +2010_001242 +2010_001245 +2010_001246 +2010_001247 +2010_001250 +2010_001251 +2010_001253 +2010_001254 +2010_001256 +2010_001257 +2010_001261 +2010_001263 +2010_001264 +2010_001270 +2010_001271 +2010_001272 +2010_001273 +2010_001274 +2010_001275 +2010_001277 +2010_001279 +2010_001282 +2010_001286 +2010_001287 +2010_001288 +2010_001289 +2010_001291 +2010_001292 +2010_001293 +2010_001294 +2010_001299 +2010_001301 +2010_001305 +2010_001310 +2010_001311 +2010_001312 +2010_001313 +2010_001315 +2010_001317 +2010_001320 +2010_001321 +2010_001325 +2010_001326 +2010_001327 +2010_001328 +2010_001329 +2010_001331 +2010_001333 +2010_001337 +2010_001338 +2010_001339 +2010_001343 +2010_001344 +2010_001347 +2010_001351 +2010_001355 +2010_001356 +2010_001357 +2010_001360 +2010_001361 +2010_001363 +2010_001364 +2010_001366 +2010_001367 +2010_001370 +2010_001372 +2010_001374 +2010_001376 +2010_001382 +2010_001383 +2010_001385 +2010_001386 +2010_001390 +2010_001394 +2010_001395 +2010_001397 +2010_001399 +2010_001401 +2010_001402 +2010_001403 +2010_001405 +2010_001406 +2010_001407 +2010_001408 +2010_001410 +2010_001411 +2010_001412 +2010_001413 +2010_001417 +2010_001418 +2010_001421 +2010_001422 +2010_001425 +2010_001426 +2010_001430 +2010_001431 +2010_001432 +2010_001433 +2010_001434 +2010_001435 +2010_001439 +2010_001441 +2010_001448 +2010_001449 +2010_001450 +2010_001451 +2010_001452 +2010_001453 +2010_001455 
+2010_001456 +2010_001457 +2010_001458 +2010_001461 +2010_001463 +2010_001464 +2010_001465 +2010_001468 +2010_001472 +2010_001473 +2010_001478 +2010_001479 +2010_001480 +2010_001481 +2010_001486 +2010_001487 +2010_001489 +2010_001497 +2010_001499 +2010_001501 +2010_001502 +2010_001503 +2010_001505 +2010_001511 +2010_001514 +2010_001515 +2010_001516 +2010_001518 +2010_001520 +2010_001522 +2010_001525 +2010_001528 +2010_001529 +2010_001533 +2010_001534 +2010_001535 +2010_001536 +2010_001537 +2010_001539 +2010_001540 +2010_001543 +2010_001544 +2010_001547 +2010_001548 +2010_001550 +2010_001551 +2010_001552 +2010_001553 +2010_001555 +2010_001557 +2010_001560 +2010_001561 +2010_001562 +2010_001563 +2010_001569 +2010_001571 +2010_001572 +2010_001574 +2010_001576 +2010_001577 +2010_001579 +2010_001580 +2010_001583 +2010_001584 +2010_001586 +2010_001587 +2010_001590 +2010_001592 +2010_001594 +2010_001595 +2010_001596 +2010_001599 +2010_001601 +2010_001602 +2010_001603 +2010_001606 +2010_001607 +2010_001608 +2010_001614 +2010_001618 +2010_001619 +2010_001625 +2010_001626 +2010_001630 +2010_001633 +2010_001635 +2010_001636 +2010_001637 +2010_001638 +2010_001640 +2010_001644 +2010_001645 +2010_001646 +2010_001647 +2010_001649 +2010_001650 +2010_001652 +2010_001656 +2010_001659 +2010_001660 +2010_001665 +2010_001668 +2010_001669 +2010_001671 +2010_001674 +2010_001675 +2010_001676 +2010_001679 +2010_001680 +2010_001682 +2010_001685 +2010_001687 +2010_001689 +2010_001690 +2010_001691 +2010_001692 +2010_001694 +2010_001697 +2010_001698 +2010_001699 +2010_001700 +2010_001705 +2010_001706 +2010_001709 +2010_001710 +2010_001712 +2010_001715 +2010_001717 +2010_001718 +2010_001719 +2010_001720 +2010_001726 +2010_001729 +2010_001731 +2010_001732 +2010_001734 +2010_001737 +2010_001739 +2010_001743 +2010_001744 +2010_001746 +2010_001747 +2010_001748 +2010_001749 +2010_001752 +2010_001753 +2010_001754 +2010_001756 +2010_001757 +2010_001759 +2010_001760 +2010_001762 +2010_001763 +2010_001767 +2010_001768 +2010_001770 +2010_001771 +2010_001773 +2010_001776 +2010_001777 +2010_001780 +2010_001783 +2010_001784 +2010_001785 +2010_001787 +2010_001788 +2010_001794 +2010_001795 +2010_001796 +2010_001797 +2010_001801 +2010_001803 +2010_001806 +2010_001807 +2010_001808 +2010_001810 +2010_001814 +2010_001817 +2010_001819 +2010_001820 +2010_001821 +2010_001823 +2010_001827 +2010_001828 +2010_001829 +2010_001830 +2010_001836 +2010_001837 +2010_001838 +2010_001841 +2010_001842 +2010_001843 +2010_001845 +2010_001846 +2010_001849 +2010_001850 +2010_001851 +2010_001852 +2010_001853 +2010_001856 +2010_001857 +2010_001858 +2010_001860 +2010_001862 +2010_001863 +2010_001864 +2010_001868 +2010_001869 +2010_001870 +2010_001877 +2010_001881 +2010_001884 +2010_001885 +2010_001891 +2010_001892 +2010_001893 +2010_001896 +2010_001899 +2010_001904 +2010_001907 +2010_001908 +2010_001911 +2010_001913 +2010_001916 +2010_001918 +2010_001919 +2010_001921 +2010_001922 +2010_001923 +2010_001924 +2010_001927 +2010_001929 +2010_001931 +2010_001933 +2010_001934 +2010_001937 +2010_001938 +2010_001939 +2010_001940 +2010_001941 +2010_001944 +2010_001948 +2010_001950 +2010_001951 +2010_001954 +2010_001956 +2010_001957 +2010_001960 +2010_001962 +2010_001966 +2010_001967 +2010_001968 +2010_001970 +2010_001973 +2010_001974 +2010_001976 +2010_001978 +2010_001979 +2010_001980 +2010_001981 +2010_001982 +2010_001986 +2010_001987 +2010_001988 +2010_001992 +2010_001993 +2010_001994 +2010_001995 +2010_001998 +2010_002000 +2010_002002 +2010_002005 +2010_002006 
+2010_002015 +2010_002017 +2010_002018 +2010_002019 +2010_002020 +2010_002022 +2010_002023 +2010_002025 +2010_002026 +2010_002029 +2010_002030 +2010_002032 +2010_002037 +2010_002039 +2010_002040 +2010_002041 +2010_002042 +2010_002044 +2010_002045 +2010_002046 +2010_002047 +2010_002048 +2010_002050 +2010_002054 +2010_002055 +2010_002057 +2010_002058 +2010_002060 +2010_002065 +2010_002067 +2010_002068 +2010_002070 +2010_002073 +2010_002080 +2010_002085 +2010_002086 +2010_002089 +2010_002094 +2010_002095 +2010_002096 +2010_002097 +2010_002098 +2010_002100 +2010_002102 +2010_002104 +2010_002105 +2010_002106 +2010_002107 +2010_002113 +2010_002117 +2010_002118 +2010_002121 +2010_002124 +2010_002127 +2010_002128 +2010_002129 +2010_002130 +2010_002132 +2010_002133 +2010_002136 +2010_002137 +2010_002138 +2010_002139 +2010_002141 +2010_002142 +2010_002143 +2010_002146 +2010_002147 +2010_002149 +2010_002150 +2010_002152 +2010_002154 +2010_002161 +2010_002166 +2010_002167 +2010_002168 +2010_002172 +2010_002175 +2010_002176 +2010_002177 +2010_002179 +2010_002180 +2010_002181 +2010_002182 +2010_002183 +2010_002185 +2010_002187 +2010_002191 +2010_002192 +2010_002193 +2010_002194 +2010_002195 +2010_002199 +2010_002200 +2010_002203 +2010_002204 +2010_002207 +2010_002208 +2010_002211 +2010_002213 +2010_002215 +2010_002216 +2010_002218 +2010_002219 +2010_002220 +2010_002221 +2010_002223 +2010_002224 +2010_002226 +2010_002227 +2010_002228 +2010_002229 +2010_002232 +2010_002236 +2010_002242 +2010_002243 +2010_002244 +2010_002245 +2010_002247 +2010_002248 +2010_002251 +2010_002254 +2010_002255 +2010_002261 +2010_002263 +2010_002267 +2010_002269 +2010_002271 +2010_002274 +2010_002276 +2010_002278 +2010_002279 +2010_002283 +2010_002286 +2010_002287 +2010_002289 +2010_002294 +2010_002295 +2010_002299 +2010_002301 +2010_002303 +2010_002305 +2010_002307 +2010_002309 +2010_002310 +2010_002312 +2010_002313 +2010_002315 +2010_002316 +2010_002318 +2010_002319 +2010_002320 +2010_002321 +2010_002326 +2010_002327 +2010_002332 +2010_002333 +2010_002336 +2010_002337 +2010_002338 +2010_002340 +2010_002346 +2010_002348 +2010_002349 +2010_002353 +2010_002354 +2010_002356 +2010_002357 +2010_002361 +2010_002363 +2010_002364 +2010_002365 +2010_002366 +2010_002368 +2010_002369 +2010_002370 +2010_002371 +2010_002372 +2010_002373 +2010_002374 +2010_002378 +2010_002379 +2010_002382 +2010_002383 +2010_002387 +2010_002388 +2010_002390 +2010_002391 +2010_002392 +2010_002393 +2010_002396 +2010_002398 +2010_002399 +2010_002400 +2010_002402 +2010_002405 +2010_002406 +2010_002408 +2010_002409 +2010_002410 +2010_002413 +2010_002418 +2010_002420 +2010_002422 +2010_002424 +2010_002425 +2010_002427 +2010_002429 +2010_002431 +2010_002435 +2010_002436 +2010_002438 +2010_002439 +2010_002440 +2010_002445 +2010_002446 +2010_002448 +2010_002449 +2010_002450 +2010_002452 +2010_002455 +2010_002456 +2010_002457 +2010_002458 +2010_002459 +2010_002460 +2010_002461 +2010_002462 +2010_002468 +2010_002469 +2010_002472 +2010_002475 +2010_002479 +2010_002480 +2010_002482 +2010_002484 +2010_002485 +2010_002487 +2010_002492 +2010_002496 +2010_002497 +2010_002498 +2010_002499 +2010_002501 +2010_002504 +2010_002507 +2010_002509 +2010_002510 +2010_002512 +2010_002513 +2010_002516 +2010_002518 +2010_002520 +2010_002526 +2010_002527 +2010_002529 +2010_002531 +2010_002532 +2010_002533 +2010_002534 +2010_002536 +2010_002537 +2010_002538 +2010_002539 +2010_002542 +2010_002543 +2010_002546 +2010_002547 +2010_002551 +2010_002552 +2010_002553 +2010_002556 +2010_002561 
+2010_002562 +2010_002565 +2010_002567 +2010_002569 +2010_002570 +2010_002573 +2010_002575 +2010_002577 +2010_002578 +2010_002579 +2010_002580 +2010_002582 +2010_002583 +2010_002586 +2010_002587 +2010_002589 +2010_002592 +2010_002594 +2010_002597 +2010_002598 +2010_002601 +2010_002602 +2010_002603 +2010_002605 +2010_002614 +2010_002615 +2010_002616 +2010_002618 +2010_002620 +2010_002621 +2010_002623 +2010_002624 +2010_002625 +2010_002626 +2010_002628 +2010_002629 +2010_002631 +2010_002632 +2010_002638 +2010_002639 +2010_002642 +2010_002644 +2010_002645 +2010_002647 +2010_002652 +2010_002653 +2010_002654 +2010_002656 +2010_002659 +2010_002660 +2010_002661 +2010_002662 +2010_002665 +2010_002666 +2010_002667 +2010_002668 +2010_002674 +2010_002675 +2010_002676 +2010_002678 +2010_002679 +2010_002682 +2010_002684 +2010_002686 +2010_002688 +2010_002691 +2010_002692 +2010_002693 +2010_002695 +2010_002696 +2010_002697 +2010_002701 +2010_002702 +2010_002704 +2010_002705 +2010_002708 +2010_002710 +2010_002713 +2010_002714 +2010_002716 +2010_002720 +2010_002721 +2010_002722 +2010_002723 +2010_002725 +2010_002728 +2010_002729 +2010_002733 +2010_002734 +2010_002736 +2010_002737 +2010_002740 +2010_002741 +2010_002742 +2010_002746 +2010_002747 +2010_002750 +2010_002752 +2010_002754 +2010_002758 +2010_002759 +2010_002760 +2010_002763 +2010_002767 +2010_002770 +2010_002771 +2010_002772 +2010_002774 +2010_002775 +2010_002778 +2010_002779 +2010_002780 +2010_002781 +2010_002783 +2010_002786 +2010_002789 +2010_002790 +2010_002791 +2010_002792 +2010_002793 +2010_002794 +2010_002797 +2010_002801 +2010_002803 +2010_002805 +2010_002807 +2010_002808 +2010_002811 +2010_002813 +2010_002814 +2010_002815 +2010_002816 +2010_002817 +2010_002820 +2010_002821 +2010_002822 +2010_002824 +2010_002827 +2010_002830 +2010_002831 +2010_002834 +2010_002838 +2010_002839 +2010_002840 +2010_002841 +2010_002842 +2010_002843 +2010_002844 +2010_002845 +2010_002851 +2010_002853 +2010_002854 +2010_002855 +2010_002856 +2010_002857 +2010_002858 +2010_002860 +2010_002864 +2010_002865 +2010_002868 +2010_002870 +2010_002871 +2010_002873 +2010_002876 +2010_002877 +2010_002879 +2010_002880 +2010_002881 +2010_002884 +2010_002887 +2010_002891 +2010_002892 +2010_002896 +2010_002899 +2010_002900 +2010_002901 +2010_002902 +2010_002903 +2010_002905 +2010_002907 +2010_002909 +2010_002914 +2010_002915 +2010_002917 +2010_002921 +2010_002924 +2010_002927 +2010_002929 +2010_002930 +2010_002931 +2010_002935 +2010_002937 +2010_002938 +2010_002939 +2010_002940 +2010_002941 +2010_002946 +2010_002947 +2010_002948 +2010_002954 +2010_002955 +2010_002956 +2010_002958 +2010_002960 +2010_002962 +2010_002963 +2010_002965 +2010_002972 +2010_002973 +2010_002976 +2010_002978 +2010_002979 +2010_002980 +2010_002982 +2010_002985 +2010_002987 +2010_002988 +2010_002990 +2010_002991 +2010_002993 +2010_002995 +2010_003002 +2010_003003 +2010_003007 +2010_003010 +2010_003011 +2010_003013 +2010_003014 +2010_003015 +2010_003016 +2010_003017 +2010_003019 +2010_003024 +2010_003025 +2010_003027 +2010_003028 +2010_003032 +2010_003034 +2010_003035 +2010_003037 +2010_003040 +2010_003043 +2010_003044 +2010_003047 +2010_003050 +2010_003051 +2010_003053 +2010_003054 +2010_003055 +2010_003056 +2010_003057 +2010_003060 +2010_003062 +2010_003067 +2010_003071 +2010_003072 +2010_003074 +2010_003077 +2010_003078 +2010_003081 +2010_003082 +2010_003084 +2010_003086 +2010_003088 +2010_003091 +2010_003092 +2010_003093 +2010_003094 +2010_003097 +2010_003098 +2010_003101 +2010_003102 +2010_003103 
+2010_003106 +2010_003107 +2010_003108 +2010_003112 +2010_003114 +2010_003115 +2010_003117 +2010_003119 +2010_003120 +2010_003122 +2010_003123 +2010_003127 +2010_003129 +2010_003132 +2010_003133 +2010_003135 +2010_003137 +2010_003138 +2010_003139 +2010_003143 +2010_003146 +2010_003147 +2010_003148 +2010_003149 +2010_003151 +2010_003153 +2010_003154 +2010_003156 +2010_003157 +2010_003159 +2010_003160 +2010_003162 +2010_003168 +2010_003169 +2010_003170 +2010_003173 +2010_003174 +2010_003176 +2010_003179 +2010_003183 +2010_003185 +2010_003186 +2010_003187 +2010_003190 +2010_003191 +2010_003192 +2010_003197 +2010_003199 +2010_003200 +2010_003201 +2010_003203 +2010_003204 +2010_003206 +2010_003207 +2010_003212 +2010_003214 +2010_003218 +2010_003219 +2010_003220 +2010_003222 +2010_003223 +2010_003227 +2010_003230 +2010_003231 +2010_003232 +2010_003233 +2010_003236 +2010_003238 +2010_003239 +2010_003240 +2010_003241 +2010_003244 +2010_003248 +2010_003249 +2010_003250 +2010_003251 +2010_003252 +2010_003253 +2010_003255 +2010_003256 +2010_003257 +2010_003259 +2010_003260 +2010_003263 +2010_003264 +2010_003269 +2010_003270 +2010_003274 +2010_003275 +2010_003276 +2010_003278 +2010_003279 +2010_003280 +2010_003283 +2010_003285 +2010_003287 +2010_003290 +2010_003291 +2010_003293 +2010_003297 +2010_003299 +2010_003300 +2010_003301 +2010_003302 +2010_003303 +2010_003304 +2010_003305 +2010_003309 +2010_003314 +2010_003316 +2010_003321 +2010_003325 +2010_003326 +2010_003329 +2010_003331 +2010_003332 +2010_003333 +2010_003335 +2010_003337 +2010_003341 +2010_003342 +2010_003343 +2010_003344 +2010_003345 +2010_003350 +2010_003351 +2010_003353 +2010_003355 +2010_003358 +2010_003361 +2010_003362 +2010_003365 +2010_003366 +2010_003367 +2010_003368 +2010_003370 +2010_003371 +2010_003372 +2010_003374 +2010_003375 +2010_003376 +2010_003379 +2010_003380 +2010_003381 +2010_003383 +2010_003384 +2010_003385 +2010_003390 +2010_003391 +2010_003395 +2010_003397 +2010_003398 +2010_003400 +2010_003401 +2010_003402 +2010_003405 +2010_003406 +2010_003409 +2010_003411 +2010_003415 +2010_003418 +2010_003419 +2010_003421 +2010_003427 +2010_003429 +2010_003432 +2010_003435 +2010_003436 +2010_003437 +2010_003439 +2010_003446 +2010_003450 +2010_003451 +2010_003453 +2010_003458 +2010_003461 +2010_003465 +2010_003467 +2010_003468 +2010_003469 +2010_003470 +2010_003473 +2010_003474 +2010_003477 +2010_003478 +2010_003479 +2010_003481 +2010_003482 +2010_003483 +2010_003488 +2010_003490 +2010_003491 +2010_003493 +2010_003495 +2010_003496 +2010_003497 +2010_003503 +2010_003506 +2010_003507 +2010_003508 +2010_003509 +2010_003512 +2010_003513 +2010_003514 +2010_003520 +2010_003522 +2010_003526 +2010_003527 +2010_003529 +2010_003531 +2010_003532 +2010_003534 +2010_003535 +2010_003537 +2010_003538 +2010_003539 +2010_003540 +2010_003541 +2010_003546 +2010_003547 +2010_003549 +2010_003551 +2010_003554 +2010_003556 +2010_003559 +2010_003560 +2010_003561 +2010_003562 +2010_003563 +2010_003567 +2010_003568 +2010_003569 +2010_003573 +2010_003574 +2010_003576 +2010_003579 +2010_003582 +2010_003585 +2010_003588 +2010_003592 +2010_003594 +2010_003597 +2010_003598 +2010_003599 +2010_003601 +2010_003603 +2010_003604 +2010_003605 +2010_003608 +2010_003609 +2010_003610 +2010_003612 +2010_003613 +2010_003618 +2010_003625 +2010_003628 +2010_003629 +2010_003630 +2010_003632 +2010_003634 +2010_003635 +2010_003640 +2010_003641 +2010_003643 +2010_003644 +2010_003645 +2010_003648 +2010_003649 +2010_003651 +2010_003653 +2010_003655 +2010_003656 +2010_003659 
+2010_003664 +2010_003665 +2010_003667 +2010_003670 +2010_003671 +2010_003672 +2010_003673 +2010_003674 +2010_003675 +2010_003677 +2010_003679 +2010_003680 +2010_003686 +2010_003687 +2010_003688 +2010_003689 +2010_003690 +2010_003695 +2010_003696 +2010_003701 +2010_003703 +2010_003708 +2010_003709 +2010_003714 +2010_003716 +2010_003717 +2010_003719 +2010_003721 +2010_003723 +2010_003724 +2010_003725 +2010_003728 +2010_003729 +2010_003730 +2010_003731 +2010_003734 +2010_003735 +2010_003736 +2010_003737 +2010_003742 +2010_003743 +2010_003744 +2010_003745 +2010_003746 +2010_003747 +2010_003752 +2010_003754 +2010_003755 +2010_003757 +2010_003758 +2010_003761 +2010_003762 +2010_003764 +2010_003768 +2010_003770 +2010_003771 +2010_003772 +2010_003773 +2010_003774 +2010_003779 +2010_003781 +2010_003784 +2010_003788 +2010_003789 +2010_003791 +2010_003792 +2010_003798 +2010_003799 +2010_003800 +2010_003801 +2010_003804 +2010_003805 +2010_003806 +2010_003807 +2010_003811 +2010_003813 +2010_003815 +2010_003816 +2010_003818 +2010_003820 +2010_003821 +2010_003822 +2010_003823 +2010_003825 +2010_003826 +2010_003828 +2010_003837 +2010_003844 +2010_003845 +2010_003847 +2010_003848 +2010_003852 +2010_003854 +2010_003855 +2010_003856 +2010_003857 +2010_003859 +2010_003860 +2010_003861 +2010_003863 +2010_003864 +2010_003865 +2010_003871 +2010_003874 +2010_003875 +2010_003877 +2010_003878 +2010_003879 +2010_003884 +2010_003887 +2010_003890 +2010_003891 +2010_003892 +2010_003893 +2010_003894 +2010_003897 +2010_003898 +2010_003899 +2010_003900 +2010_003906 +2010_003910 +2010_003911 +2010_003912 +2010_003914 +2010_003915 +2010_003919 +2010_003920 +2010_003925 +2010_003928 +2010_003929 +2010_003931 +2010_003933 +2010_003936 +2010_003937 +2010_003938 +2010_003939 +2010_003942 +2010_003943 +2010_003944 +2010_003945 +2010_003947 +2010_003949 +2010_003950 +2010_003954 +2010_003955 +2010_003956 +2010_003957 +2010_003958 +2010_003961 +2010_003966 +2010_003970 +2010_003971 +2010_003974 +2010_003976 +2010_003980 +2010_003981 +2010_003982 +2010_003983 +2010_003987 +2010_003988 +2010_003994 +2010_003995 +2010_003996 +2010_003999 +2010_004002 +2010_004005 +2010_004006 +2010_004007 +2010_004008 +2010_004009 +2010_004010 +2010_004011 +2010_004014 +2010_004017 +2010_004021 +2010_004023 +2010_004025 +2010_004026 +2010_004027 +2010_004028 +2010_004029 +2010_004030 +2010_004031 +2010_004033 +2010_004036 +2010_004037 +2010_004041 +2010_004042 +2010_004043 +2010_004045 +2010_004048 +2010_004050 +2010_004052 +2010_004053 +2010_004054 +2010_004056 +2010_004059 +2010_004060 +2010_004061 +2010_004062 +2010_004063 +2010_004064 +2010_004065 +2010_004066 +2010_004067 +2010_004069 +2010_004071 +2010_004072 +2010_004073 +2010_004074 +2010_004075 +2010_004081 +2010_004084 +2010_004088 +2010_004089 +2010_004092 +2010_004094 +2010_004095 +2010_004096 +2010_004102 +2010_004104 +2010_004105 +2010_004107 +2010_004108 +2010_004109 +2010_004111 +2010_004116 +2010_004118 +2010_004119 +2010_004120 +2010_004121 +2010_004123 +2010_004124 +2010_004125 +2010_004129 +2010_004130 +2010_004133 +2010_004137 +2010_004138 +2010_004139 +2010_004140 +2010_004141 +2010_004143 +2010_004144 +2010_004145 +2010_004148 +2010_004149 +2010_004154 +2010_004157 +2010_004160 +2010_004161 +2010_004162 +2010_004163 +2010_004165 +2010_004168 +2010_004171 +2010_004172 +2010_004173 +2010_004175 +2010_004178 +2010_004179 +2010_004180 +2010_004182 +2010_004184 +2010_004186 +2010_004187 +2010_004188 +2010_004191 +2010_004192 +2010_004193 +2010_004197 +2010_004198 +2010_004201 
+2010_004204 +2010_004207 +2010_004208 +2010_004209 +2010_004210 +2010_004211 +2010_004216 +2010_004219 +2010_004222 +2010_004223 +2010_004224 +2010_004225 +2010_004226 +2010_004227 +2010_004228 +2010_004229 +2010_004230 +2010_004231 +2010_004238 +2010_004239 +2010_004242 +2010_004244 +2010_004247 +2010_004248 +2010_004249 +2010_004252 +2010_004253 +2010_004254 +2010_004256 +2010_004257 +2010_004258 +2010_004259 +2010_004263 +2010_004264 +2010_004271 +2010_004275 +2010_004276 +2010_004278 +2010_004279 +2010_004280 +2010_004282 +2010_004283 +2010_004286 +2010_004288 +2010_004289 +2010_004290 +2010_004291 +2010_004295 +2010_004296 +2010_004297 +2010_004301 +2010_004304 +2010_004306 +2010_004307 +2010_004311 +2010_004312 +2010_004313 +2010_004314 +2010_004318 +2010_004320 +2010_004322 +2010_004325 +2010_004327 +2010_004332 +2010_004333 +2010_004335 +2010_004336 +2010_004337 +2010_004339 +2010_004341 +2010_004344 +2010_004345 +2010_004346 +2010_004348 +2010_004349 +2010_004350 +2010_004351 +2010_004352 +2010_004355 +2010_004357 +2010_004358 +2010_004360 +2010_004361 +2010_004362 +2010_004363 +2010_004365 +2010_004366 +2010_004367 +2010_004368 +2010_004369 +2010_004370 +2010_004371 +2010_004373 +2010_004374 +2010_004380 +2010_004382 +2010_004385 +2010_004387 +2010_004390 +2010_004391 +2010_004400 +2010_004402 +2010_004404 +2010_004409 +2010_004412 +2010_004415 +2010_004417 +2010_004419 +2010_004420 +2010_004422 +2010_004423 +2010_004425 +2010_004428 +2010_004429 +2010_004431 +2010_004432 +2010_004436 +2010_004439 +2010_004441 +2010_004445 +2010_004447 +2010_004448 +2010_004450 +2010_004451 +2010_004455 +2010_004456 +2010_004457 +2010_004459 +2010_004460 +2010_004461 +2010_004466 +2010_004467 +2010_004469 +2010_004472 +2010_004475 +2010_004476 +2010_004477 +2010_004478 +2010_004479 +2010_004481 +2010_004483 +2010_004484 +2010_004486 +2010_004488 +2010_004491 +2010_004492 +2010_004493 +2010_004499 +2010_004501 +2010_004503 +2010_004505 +2010_004506 +2010_004509 +2010_004511 +2010_004514 +2010_004515 +2010_004517 +2010_004518 +2010_004519 +2010_004520 +2010_004521 +2010_004523 +2010_004529 +2010_004533 +2010_004536 +2010_004537 +2010_004540 +2010_004542 +2010_004543 +2010_004545 +2010_004546 +2010_004550 +2010_004551 +2010_004553 +2010_004554 +2010_004556 +2010_004557 +2010_004558 +2010_004559 +2010_004560 +2010_004561 +2010_004567 +2010_004569 +2010_004570 +2010_004573 +2010_004575 +2010_004576 +2010_004577 +2010_004581 +2010_004584 +2010_004585 +2010_004586 +2010_004588 +2010_004591 +2010_004592 +2010_004594 +2010_004596 +2010_004597 +2010_004598 +2010_004600 +2010_004601 +2010_004604 +2010_004608 +2010_004609 +2010_004616 +2010_004618 +2010_004620 +2010_004621 +2010_004624 +2010_004625 +2010_004627 +2010_004628 +2010_004629 +2010_004631 +2010_004634 +2010_004635 +2010_004637 +2010_004638 +2010_004642 +2010_004646 +2010_004654 +2010_004655 +2010_004656 +2010_004657 +2010_004659 +2010_004660 +2010_004661 +2010_004662 +2010_004665 +2010_004666 +2010_004667 +2010_004669 +2010_004670 +2010_004672 +2010_004676 +2010_004677 +2010_004679 +2010_004680 +2010_004681 +2010_004683 +2010_004686 +2010_004690 +2010_004691 +2010_004692 +2010_004694 +2010_004696 +2010_004697 +2010_004698 +2010_004703 +2010_004704 +2010_004708 +2010_004710 +2010_004712 +2010_004714 +2010_004717 +2010_004721 +2010_004722 +2010_004726 +2010_004728 +2010_004729 +2010_004730 +2010_004733 +2010_004735 +2010_004738 +2010_004741 +2010_004743 +2010_004747 +2010_004748 +2010_004749 +2010_004750 +2010_004751 +2010_004753 +2010_004756 
+2010_004757 +2010_004760 +2010_004763 +2010_004765 +2010_004766 +2010_004768 +2010_004770 +2010_004772 +2010_004773 +2010_004775 +2010_004777 +2010_004778 +2010_004779 +2010_004782 +2010_004783 +2010_004785 +2010_004786 +2010_004789 +2010_004791 +2010_004792 +2010_004793 +2010_004795 +2010_004797 +2010_004804 +2010_004805 +2010_004806 +2010_004807 +2010_004808 +2010_004809 +2010_004812 +2010_004813 +2010_004815 +2010_004816 +2010_004817 +2010_004821 +2010_004822 +2010_004824 +2010_004825 +2010_004826 +2010_004828 +2010_004829 +2010_004830 +2010_004831 +2010_004832 +2010_004836 +2010_004838 +2010_004841 +2010_004844 +2010_004847 +2010_004848 +2010_004849 +2010_004852 +2010_004854 +2010_004855 +2010_004856 +2010_004857 +2010_004861 +2010_004865 +2010_004866 +2010_004868 +2010_004871 +2010_004874 +2010_004877 +2010_004878 +2010_004879 +2010_004888 +2010_004889 +2010_004890 +2010_004891 +2010_004894 +2010_004896 +2010_004900 +2010_004901 +2010_004903 +2010_004906 +2010_004908 +2010_004909 +2010_004910 +2010_004913 +2010_004916 +2010_004917 +2010_004918 +2010_004919 +2010_004921 +2010_004922 +2010_004928 +2010_004930 +2010_004931 +2010_004933 +2010_004937 +2010_004938 +2010_004941 +2010_004942 +2010_004943 +2010_004944 +2010_004945 +2010_004946 +2010_004948 +2010_004950 +2010_004951 +2010_004952 +2010_004953 +2010_004954 +2010_004957 +2010_004959 +2010_004960 +2010_004962 +2010_004963 +2010_004966 +2010_004967 +2010_004968 +2010_004970 +2010_004971 +2010_004973 +2010_004974 +2010_004980 +2010_004982 +2010_004983 +2010_004987 +2010_004989 +2010_004991 +2010_004992 +2010_004994 +2010_004995 +2010_004997 +2010_004998 +2010_005000 +2010_005002 +2010_005005 +2010_005006 +2010_005008 +2010_005011 +2010_005013 +2010_005016 +2010_005017 +2010_005018 +2010_005019 +2010_005021 +2010_005022 +2010_005023 +2010_005026 +2010_005028 +2010_005031 +2010_005033 +2010_005035 +2010_005041 +2010_005042 +2010_005044 +2010_005046 +2010_005048 +2010_005049 +2010_005052 +2010_005053 +2010_005054 +2010_005055 +2010_005059 +2010_005060 +2010_005061 +2010_005062 +2010_005063 +2010_005064 +2010_005066 +2010_005068 +2010_005071 +2010_005072 +2010_005075 +2010_005079 +2010_005080 +2010_005082 +2010_005083 +2010_005087 +2010_005090 +2010_005093 +2010_005094 +2010_005096 +2010_005098 +2010_005099 +2010_005100 +2010_005101 +2010_005106 +2010_005107 +2010_005108 +2010_005109 +2010_005110 +2010_005111 +2010_005115 +2010_005116 +2010_005118 +2010_005119 +2010_005120 +2010_005123 +2010_005127 +2010_005128 +2010_005129 +2010_005130 +2010_005133 +2010_005134 +2010_005136 +2010_005138 +2010_005141 +2010_005143 +2010_005147 +2010_005148 +2010_005149 +2010_005152 +2010_005155 +2010_005158 +2010_005159 +2010_005160 +2010_005161 +2010_005164 +2010_005166 +2010_005167 +2010_005169 +2010_005170 +2010_005174 +2010_005180 +2010_005182 +2010_005183 +2010_005184 +2010_005185 +2010_005187 +2010_005188 +2010_005190 +2010_005192 +2010_005193 +2010_005198 +2010_005199 +2010_005201 +2010_005202 +2010_005206 +2010_005208 +2010_005211 +2010_005213 +2010_005215 +2010_005216 +2010_005217 +2010_005222 +2010_005223 +2010_005224 +2010_005226 +2010_005229 +2010_005230 +2010_005232 +2010_005236 +2010_005238 +2010_005239 +2010_005241 +2010_005242 +2010_005243 +2010_005245 +2010_005246 +2010_005250 +2010_005252 +2010_005253 +2010_005257 +2010_005258 +2010_005260 +2010_005261 +2010_005264 +2010_005266 +2010_005268 +2010_005270 +2010_005272 +2010_005273 +2010_005274 +2010_005275 +2010_005276 +2010_005277 +2010_005279 +2010_005284 +2010_005285 +2010_005287 
+2010_005292 +2010_005293 +2010_005297 +2010_005299 +2010_005301 +2010_005303 +2010_005305 +2010_005306 +2010_005308 +2010_005309 +2010_005310 +2010_005312 +2010_005314 +2010_005317 +2010_005318 +2010_005320 +2010_005323 +2010_005327 +2010_005330 +2010_005331 +2010_005332 +2010_005338 +2010_005340 +2010_005344 +2010_005345 +2010_005346 +2010_005349 +2010_005350 +2010_005352 +2010_005353 +2010_005359 +2010_005361 +2010_005364 +2010_005365 +2010_005366 +2010_005369 +2010_005371 +2010_005372 +2010_005374 +2010_005375 +2010_005376 +2010_005377 +2010_005379 +2010_005382 +2010_005384 +2010_005385 +2010_005386 +2010_005388 +2010_005389 +2010_005391 +2010_005393 +2010_005394 +2010_005398 +2010_005401 +2010_005402 +2010_005403 +2010_005405 +2010_005406 +2010_005408 +2010_005409 +2010_005410 +2010_005414 +2010_005415 +2010_005416 +2010_005417 +2010_005419 +2010_005421 +2010_005424 +2010_005425 +2010_005426 +2010_005428 +2010_005429 +2010_005432 +2010_005433 +2010_005434 +2010_005437 +2010_005441 +2010_005442 +2010_005448 +2010_005450 +2010_005452 +2010_005455 +2010_005456 +2010_005457 +2010_005458 +2010_005462 +2010_005463 +2010_005466 +2010_005467 +2010_005468 +2010_005471 +2010_005472 +2010_005474 +2010_005475 +2010_005480 +2010_005482 +2010_005483 +2010_005484 +2010_005489 +2010_005491 +2010_005492 +2010_005493 +2010_005494 +2010_005496 +2010_005497 +2010_005498 +2010_005500 +2010_005501 +2010_005502 +2010_005505 +2010_005506 +2010_005508 +2010_005511 +2010_005512 +2010_005513 +2010_005514 +2010_005515 +2010_005516 +2010_005518 +2010_005519 +2010_005522 +2010_005527 +2010_005531 +2010_005532 +2010_005534 +2010_005535 +2010_005536 +2010_005538 +2010_005540 +2010_005542 +2010_005543 +2010_005546 +2010_005548 +2010_005551 +2010_005556 +2010_005557 +2010_005559 +2010_005561 +2010_005562 +2010_005565 +2010_005566 +2010_005567 +2010_005570 +2010_005571 +2010_005572 +2010_005573 +2010_005575 +2010_005576 +2010_005578 +2010_005582 +2010_005584 +2010_005585 +2010_005586 +2010_005587 +2010_005588 +2010_005591 +2010_005592 +2010_005593 +2010_005594 +2010_005595 +2010_005596 +2010_005597 +2010_005601 +2010_005603 +2010_005604 +2010_005606 +2010_005608 +2010_005610 +2010_005612 +2010_005614 +2010_005615 +2010_005616 +2010_005619 +2010_005620 +2010_005625 +2010_005626 +2010_005627 +2010_005628 +2010_005629 +2010_005632 +2010_005635 +2010_005636 +2010_005637 +2010_005640 +2010_005643 +2010_005644 +2010_005646 +2010_005647 +2010_005651 +2010_005652 +2010_005654 +2010_005657 +2010_005658 +2010_005663 +2010_005664 +2010_005665 +2010_005666 +2010_005668 +2010_005669 +2010_005670 +2010_005671 +2010_005672 +2010_005676 +2010_005678 +2010_005681 +2010_005683 +2010_005684 +2010_005688 +2010_005692 +2010_005696 +2010_005697 +2010_005700 +2010_005705 +2010_005706 +2010_005709 +2010_005712 +2010_005715 +2010_005716 +2010_005718 +2010_005719 +2010_005721 +2010_005723 +2010_005725 +2010_005727 +2010_005731 +2010_005732 +2010_005733 +2010_005734 +2010_005735 +2010_005736 +2010_005738 +2010_005740 +2010_005744 +2010_005746 +2010_005747 +2010_005748 +2010_005750 +2010_005752 +2010_005753 +2010_005754 +2010_005755 +2010_005756 +2010_005758 +2010_005761 +2010_005762 +2010_005763 +2010_005764 +2010_005767 +2010_005768 +2010_005770 +2010_005775 +2010_005776 +2010_005777 +2010_005780 +2010_005782 +2010_005784 +2010_005785 +2010_005788 +2010_005791 +2010_005794 +2010_005796 +2010_005800 +2010_005804 +2010_005805 +2010_005806 +2010_005807 +2010_005810 +2010_005815 +2010_005816 +2010_005817 +2010_005820 +2010_005821 +2010_005823 
+2010_005824 +2010_005825 +2010_005826 +2010_005827 +2010_005830 +2010_005833 +2010_005835 +2010_005836 +2010_005837 +2010_005838 +2010_005840 +2010_005841 +2010_005843 +2010_005845 +2010_005847 +2010_005848 +2010_005849 +2010_005853 +2010_005854 +2010_005855 +2010_005860 +2010_005865 +2010_005867 +2010_005868 +2010_005870 +2010_005871 +2010_005874 +2010_005875 +2010_005876 +2010_005877 +2010_005882 +2010_005883 +2010_005884 +2010_005885 +2010_005886 +2010_005888 +2010_005891 +2010_005892 +2010_005894 +2010_005896 +2010_005897 +2010_005898 +2010_005899 +2010_005901 +2010_005903 +2010_005904 +2010_005906 +2010_005907 +2010_005909 +2010_005914 +2010_005919 +2010_005921 +2010_005922 +2010_005927 +2010_005928 +2010_005929 +2010_005930 +2010_005932 +2010_005934 +2010_005935 +2010_005936 +2010_005937 +2010_005938 +2010_005942 +2010_005943 +2010_005948 +2010_005949 +2010_005951 +2010_005952 +2010_005953 +2010_005954 +2010_005957 +2010_005958 +2010_005959 +2010_005960 +2010_005967 +2010_005968 +2010_005972 +2010_005973 +2010_005974 +2010_005975 +2010_005976 +2010_005978 +2010_005980 +2010_005981 +2010_005982 +2010_005984 +2010_005985 +2010_005986 +2010_005987 +2010_005991 +2010_005992 +2010_005993 +2010_005995 +2010_005996 +2010_005997 +2010_005998 +2010_006000 +2010_006003 +2010_006004 +2010_006009 +2010_006010 +2010_006011 +2010_006012 +2010_006015 +2010_006021 +2010_006023 +2010_006025 +2010_006026 +2010_006028 +2010_006031 +2010_006032 +2010_006033 +2010_006034 +2010_006035 +2010_006037 +2010_006040 +2010_006041 +2010_006042 +2010_006050 +2010_006051 +2010_006054 +2010_006056 +2010_006057 +2010_006058 +2010_006061 +2010_006062 +2010_006063 +2010_006066 +2010_006067 +2010_006070 +2010_006073 +2010_006076 +2010_006078 +2010_006079 +2010_006082 +2010_006084 +2010_006086 +2011_000002 +2011_000003 +2011_000006 +2011_000007 +2011_000009 +2011_000010 +2011_000012 +2011_000016 +2011_000017 +2011_000022 +2011_000025 +2011_000027 +2011_000028 +2011_000030 +2011_000034 +2011_000036 +2011_000037 +2011_000038 +2011_000041 +2011_000043 +2011_000044 +2011_000045 +2011_000048 +2011_000051 +2011_000052 +2011_000053 +2011_000054 +2011_000057 +2011_000058 +2011_000060 +2011_000061 +2011_000065 +2011_000066 +2011_000068 +2011_000069 +2011_000070 +2011_000071 +2011_000072 +2011_000076 +2011_000077 +2011_000082 +2011_000083 +2011_000084 +2011_000086 +2011_000087 +2011_000090 +2011_000094 +2011_000095 +2011_000096 +2011_000098 +2011_000102 +2011_000103 +2011_000105 +2011_000108 +2011_000109 +2011_000112 +2011_000114 +2011_000116 +2011_000122 +2011_000124 +2011_000128 +2011_000129 +2011_000130 +2011_000137 +2011_000138 +2011_000142 +2011_000145 +2011_000146 +2011_000147 +2011_000149 +2011_000152 +2011_000161 +2011_000162 +2011_000163 +2011_000165 +2011_000166 +2011_000173 +2011_000176 +2011_000178 +2011_000180 +2011_000181 +2011_000182 +2011_000185 +2011_000192 +2011_000194 +2011_000195 +2011_000196 +2011_000197 +2011_000202 +2011_000206 +2011_000208 +2011_000210 +2011_000213 +2011_000214 +2011_000216 +2011_000219 +2011_000220 +2011_000221 +2011_000222 +2011_000224 +2011_000226 +2011_000228 +2011_000229 +2011_000232 +2011_000233 +2011_000234 +2011_000238 +2011_000239 +2011_000241 +2011_000243 +2011_000246 +2011_000248 +2011_000249 +2011_000250 +2011_000252 +2011_000253 +2011_000257 +2011_000258 +2011_000267 +2011_000268 +2011_000269 +2011_000273 +2011_000276 +2011_000277 +2011_000278 +2011_000282 +2011_000283 +2011_000285 +2011_000286 +2011_000288 +2011_000290 +2011_000291 +2011_000293 +2011_000297 +2011_000299 
+2011_000304 +2011_000305 +2011_000307 +2011_000309 +2011_000310 +2011_000312 +2011_000314 +2011_000315 +2011_000317 +2011_000319 +2011_000320 +2011_000321 +2011_000322 +2011_000324 +2011_000329 +2011_000332 +2011_000338 +2011_000342 +2011_000343 +2011_000344 +2011_000345 +2011_000346 +2011_000347 +2011_000359 +2011_000361 +2011_000362 +2011_000364 +2011_000369 +2011_000370 +2011_000374 +2011_000375 +2011_000376 +2011_000379 +2011_000382 +2011_000383 +2011_000385 +2011_000386 +2011_000388 +2011_000391 +2011_000392 +2011_000396 +2011_000397 +2011_000398 +2011_000399 +2011_000400 +2011_000404 +2011_000408 +2011_000412 +2011_000413 +2011_000416 +2011_000418 +2011_000419 +2011_000420 +2011_000426 +2011_000427 +2011_000428 +2011_000430 +2011_000432 +2011_000434 +2011_000435 +2011_000436 +2011_000438 +2011_000442 +2011_000444 +2011_000445 +2011_000449 +2011_000450 +2011_000453 +2011_000454 +2011_000455 +2011_000456 +2011_000457 +2011_000461 +2011_000465 +2011_000468 +2011_000469 +2011_000471 +2011_000472 +2011_000474 +2011_000475 +2011_000477 +2011_000479 +2011_000481 +2011_000482 +2011_000485 +2011_000487 +2011_000491 +2011_000492 +2011_000494 +2011_000496 +2011_000498 +2011_000499 +2011_000502 +2011_000503 +2011_000505 +2011_000509 +2011_000511 +2011_000512 +2011_000513 +2011_000514 +2011_000518 +2011_000519 +2011_000520 +2011_000521 +2011_000526 +2011_000530 +2011_000531 +2011_000532 +2011_000534 +2011_000536 +2011_000538 +2011_000541 +2011_000542 +2011_000548 +2011_000550 +2011_000551 +2011_000554 +2011_000556 +2011_000557 +2011_000558 +2011_000559 +2011_000560 +2011_000565 +2011_000566 +2011_000567 +2011_000569 +2011_000572 +2011_000573 +2011_000575 +2011_000577 +2011_000578 +2011_000579 +2011_000585 +2011_000586 +2011_000589 +2011_000592 +2011_000594 +2011_000596 +2011_000598 +2011_000600 +2011_000607 +2011_000608 +2011_000609 +2011_000612 +2011_000618 +2011_000621 +2011_000622 +2011_000627 +2011_000628 +2011_000629 +2011_000630 +2011_000631 +2011_000634 +2011_000637 +2011_000638 +2011_000641 +2011_000642 +2011_000646 +2011_000651 +2011_000652 +2011_000655 +2011_000656 +2011_000657 +2011_000658 +2011_000661 +2011_000666 +2011_000669 +2011_000673 +2011_000675 +2011_000679 +2011_000682 +2011_000683 +2011_000684 +2011_000685 +2011_000688 +2011_000689 +2011_000690 +2011_000692 +2011_000698 +2011_000701 +2011_000703 +2011_000704 +2011_000709 +2011_000711 +2011_000713 +2011_000718 +2011_000724 +2011_000725 +2011_000730 +2011_000731 +2011_000734 +2011_000743 +2011_000744 +2011_000745 +2011_000747 +2011_000748 +2011_000749 +2011_000753 +2011_000755 +2011_000757 +2011_000758 +2011_000759 +2011_000763 +2011_000765 +2011_000767 +2011_000768 +2011_000769 +2011_000770 +2011_000771 +2011_000772 +2011_000774 +2011_000778 +2011_000780 +2011_000784 +2011_000785 +2011_000788 +2011_000789 +2011_000790 +2011_000791 +2011_000793 +2011_000800 +2011_000804 +2011_000806 +2011_000807 +2011_000809 +2011_000813 +2011_000815 +2011_000819 +2011_000820 +2011_000823 +2011_000824 +2011_000827 +2011_000828 +2011_000829 +2011_000830 +2011_000831 +2011_000834 +2011_000837 +2011_000839 +2011_000840 +2011_000843 +2011_000845 +2011_000847 +2011_000848 +2011_000850 +2011_000851 +2011_000853 +2011_000855 +2011_000858 +2011_000859 +2011_000872 +2011_000874 +2011_000875 +2011_000882 +2011_000885 +2011_000887 +2011_000888 +2011_000893 +2011_000895 +2011_000897 +2011_000898 +2011_000899 +2011_000900 +2011_000901 +2011_000908 +2011_000909 +2011_000912 +2011_000917 +2011_000919 +2011_000920 +2011_000922 +2011_000927 +2011_000930 
+2011_000932 +2011_000933 +2011_000934 +2011_000940 +2011_000944 +2011_000947 +2011_000950 +2011_000951 +2011_000953 +2011_000954 +2011_000957 +2011_000961 +2011_000965 +2011_000969 +2011_000971 +2011_000973 +2011_000975 +2011_000977 +2011_000979 +2011_000981 +2011_000982 +2011_000983 +2011_000986 +2011_000987 +2011_000990 +2011_000991 +2011_000996 +2011_000997 +2011_000999 +2011_001001 +2011_001004 +2011_001005 +2011_001008 +2011_001009 +2011_001010 +2011_001011 +2011_001014 +2011_001015 +2011_001016 +2011_001019 +2011_001020 +2011_001022 +2011_001023 +2011_001025 +2011_001027 +2011_001028 +2011_001029 +2011_001030 +2011_001031 +2011_001032 +2011_001033 +2011_001034 +2011_001036 +2011_001040 +2011_001044 +2011_001047 +2011_001052 +2011_001054 +2011_001055 +2011_001056 +2011_001058 +2011_001060 +2011_001062 +2011_001064 +2011_001066 +2011_001069 +2011_001071 +2011_001073 +2011_001079 +2011_001080 +2011_001081 +2011_001082 +2011_001084 +2011_001086 +2011_001091 +2011_001093 +2011_001097 +2011_001100 +2011_001105 +2011_001106 +2011_001107 +2011_001110 +2011_001111 +2011_001114 +2011_001116 +2011_001117 +2011_001123 +2011_001124 +2011_001126 +2011_001127 +2011_001128 +2011_001133 +2011_001134 +2011_001135 +2011_001136 +2011_001137 +2011_001138 +2011_001139 +2011_001144 +2011_001146 +2011_001149 +2011_001150 +2011_001152 +2011_001153 +2011_001158 +2011_001159 +2011_001160 +2011_001161 +2011_001163 +2011_001166 +2011_001167 +2011_001168 +2011_001169 +2011_001173 +2011_001175 +2011_001176 +2011_001188 +2011_001189 +2011_001190 +2011_001192 +2011_001193 +2011_001198 +2011_001201 +2011_001203 +2011_001208 +2011_001211 +2011_001213 +2011_001215 +2011_001216 +2011_001217 +2011_001220 +2011_001221 +2011_001223 +2011_001226 +2011_001227 +2011_001229 +2011_001232 +2011_001238 +2011_001240 +2011_001245 +2011_001246 +2011_001251 +2011_001252 +2011_001253 +2011_001254 +2011_001255 +2011_001257 +2011_001259 +2011_001260 +2011_001261 +2011_001263 +2011_001264 +2011_001266 +2011_001270 +2011_001271 +2011_001272 +2011_001276 +2011_001277 +2011_001281 +2011_001282 +2011_001283 +2011_001284 +2011_001285 +2011_001286 +2011_001287 +2011_001288 +2011_001290 +2011_001292 +2011_001295 +2011_001302 +2011_001304 +2011_001305 +2011_001310 +2011_001311 +2011_001313 +2011_001315 +2011_001318 +2011_001319 +2011_001320 +2011_001323 +2011_001326 +2011_001327 +2011_001329 +2011_001330 +2011_001333 +2011_001335 +2011_001336 +2011_001337 +2011_001341 +2011_001344 +2011_001346 +2011_001350 +2011_001354 +2011_001355 +2011_001357 +2011_001360 +2011_001366 +2011_001369 +2011_001370 +2011_001373 +2011_001375 +2011_001381 +2011_001382 +2011_001384 +2011_001387 +2011_001388 +2011_001389 +2011_001390 +2011_001394 +2011_001399 +2011_001400 +2011_001402 +2011_001404 +2011_001406 +2011_001407 +2011_001411 +2011_001412 +2011_001414 +2011_001416 +2011_001421 +2011_001422 +2011_001424 +2011_001432 +2011_001434 +2011_001440 +2011_001441 +2011_001447 +2011_001449 +2011_001451 +2011_001455 +2011_001456 +2011_001463 +2011_001464 +2011_001466 +2011_001467 +2011_001471 +2011_001475 +2011_001476 +2011_001479 +2011_001480 +2011_001489 +2011_001498 +2011_001501 +2011_001503 +2011_001505 +2011_001507 +2011_001508 +2011_001510 +2011_001514 +2011_001518 +2011_001519 +2011_001521 +2011_001524 +2011_001525 +2011_001526 +2011_001529 +2011_001530 +2011_001531 +2011_001532 +2011_001534 +2011_001535 +2011_001536 +2011_001537 +2011_001538 +2011_001541 +2011_001542 +2011_001543 +2011_001544 +2011_001546 +2011_001547 +2011_001549 +2011_001557 +2011_001558 
+2011_001560 +2011_001566 +2011_001567 +2011_001568 +2011_001571 +2011_001572 +2011_001573 +2011_001582 +2011_001586 +2011_001589 +2011_001591 +2011_001592 +2011_001596 +2011_001597 +2011_001599 +2011_001600 +2011_001601 +2011_001602 +2011_001605 +2011_001606 +2011_001607 +2011_001608 +2011_001611 +2011_001612 +2011_001613 +2011_001614 +2011_001616 +2011_001618 +2011_001619 +2011_001620 +2011_001621 +2011_001622 +2011_001624 +2011_001625 +2011_001628 +2011_001629 +2011_001632 +2011_001641 +2011_001642 +2011_001643 +2011_001647 +2011_001649 +2011_001650 +2011_001652 +2011_001653 +2011_001655 +2011_001656 +2011_001662 +2011_001663 +2011_001665 +2011_001666 +2011_001669 +2011_001671 +2011_001673 +2011_001674 +2011_001678 +2011_001679 +2011_001689 +2011_001691 +2011_001693 +2011_001694 +2011_001695 +2011_001698 +2011_001699 +2011_001700 +2011_001705 +2011_001707 +2011_001708 +2011_001710 +2011_001712 +2011_001713 +2011_001714 +2011_001715 +2011_001716 +2011_001719 +2011_001720 +2011_001722 +2011_001726 +2011_001727 +2011_001730 +2011_001732 +2011_001733 +2011_001739 +2011_001740 +2011_001741 +2011_001745 +2011_001747 +2011_001748 +2011_001751 +2011_001753 +2011_001754 +2011_001755 +2011_001757 +2011_001764 +2011_001765 +2011_001766 +2011_001769 +2011_001770 +2011_001771 +2011_001775 +2011_001776 +2011_001779 +2011_001782 +2011_001785 +2011_001789 +2011_001790 +2011_001791 +2011_001793 +2011_001794 +2011_001796 +2011_001799 +2011_001800 +2011_001801 +2011_001805 +2011_001806 +2011_001810 +2011_001811 +2011_001812 +2011_001815 +2011_001819 +2011_001820 +2011_001822 +2011_001824 +2011_001825 +2011_001826 +2011_001827 +2011_001833 +2011_001834 +2011_001837 +2011_001840 +2011_001841 +2011_001842 +2011_001845 +2011_001847 +2011_001854 +2011_001855 +2011_001856 +2011_001858 +2011_001862 +2011_001863 +2011_001866 +2011_001868 +2011_001870 +2011_001871 +2011_001872 +2011_001873 +2011_001875 +2011_001876 +2011_001877 +2011_001880 +2011_001884 +2011_001885 +2011_001886 +2011_001889 +2011_001891 +2011_001893 +2011_001895 +2011_001896 +2011_001900 +2011_001901 +2011_001902 +2011_001904 +2011_001906 +2011_001910 +2011_001911 +2011_001914 +2011_001919 +2011_001920 +2011_001922 +2011_001924 +2011_001926 +2011_001927 +2011_001928 +2011_001929 +2011_001930 +2011_001932 +2011_001937 +2011_001938 +2011_001941 +2011_001942 +2011_001944 +2011_001945 +2011_001946 +2011_001949 +2011_001950 +2011_001951 +2011_001952 +2011_001956 +2011_001959 +2011_001961 +2011_001962 +2011_001964 +2011_001966 +2011_001967 +2011_001971 +2011_001972 +2011_001974 +2011_001975 +2011_001977 +2011_001980 +2011_001982 +2011_001984 +2011_001986 +2011_001987 +2011_001988 +2011_001989 +2011_001991 +2011_002002 +2011_002003 +2011_002004 +2011_002005 +2011_002006 +2011_002012 +2011_002016 +2011_002018 +2011_002019 +2011_002021 +2011_002022 +2011_002027 +2011_002031 +2011_002033 +2011_002034 +2011_002036 +2011_002038 +2011_002039 +2011_002040 +2011_002041 +2011_002042 +2011_002044 +2011_002045 +2011_002046 +2011_002047 +2011_002049 +2011_002050 +2011_002053 +2011_002055 +2011_002062 +2011_002063 +2011_002064 +2011_002073 +2011_002074 +2011_002075 +2011_002079 +2011_002085 +2011_002088 +2011_002091 +2011_002093 +2011_002096 +2011_002097 +2011_002098 +2011_002100 +2011_002102 +2011_002105 +2011_002106 +2011_002107 +2011_002108 +2011_002109 +2011_002110 +2011_002111 +2011_002113 +2011_002114 +2011_002116 +2011_002119 +2011_002121 +2011_002124 +2011_002128 +2011_002131 +2011_002132 +2011_002134 +2011_002135 +2011_002137 +2011_002142 +2011_002143 
+2011_002144 +2011_002147 +2011_002148 +2011_002149 +2011_002150 +2011_002154 +2011_002156 +2011_002158 +2011_002159 +2011_002160 +2011_002163 +2011_002167 +2011_002169 +2011_002173 +2011_002174 +2011_002177 +2011_002178 +2011_002179 +2011_002184 +2011_002185 +2011_002186 +2011_002189 +2011_002192 +2011_002193 +2011_002200 +2011_002211 +2011_002215 +2011_002218 +2011_002221 +2011_002222 +2011_002223 +2011_002224 +2011_002227 +2011_002228 +2011_002230 +2011_002234 +2011_002236 +2011_002237 +2011_002239 +2011_002241 +2011_002244 +2011_002245 +2011_002246 +2011_002247 +2011_002248 +2011_002251 +2011_002252 +2011_002253 +2011_002260 +2011_002265 +2011_002268 +2011_002269 +2011_002270 +2011_002272 +2011_002273 +2011_002276 +2011_002278 +2011_002279 +2011_002280 +2011_002281 +2011_002284 +2011_002291 +2011_002292 +2011_002294 +2011_002295 +2011_002298 +2011_002300 +2011_002301 +2011_002303 +2011_002308 +2011_002312 +2011_002317 +2011_002318 +2011_002322 +2011_002324 +2011_002325 +2011_002327 +2011_002330 +2011_002335 +2011_002341 +2011_002343 +2011_002346 +2011_002347 +2011_002348 +2011_002350 +2011_002357 +2011_002358 +2011_002359 +2011_002362 +2011_002365 +2011_002366 +2011_002371 +2011_002379 +2011_002380 +2011_002381 +2011_002384 +2011_002385 +2011_002386 +2011_002387 +2011_002388 +2011_002389 +2011_002391 +2011_002393 +2011_002394 +2011_002395 +2011_002396 +2011_002397 +2011_002398 +2011_002402 +2011_002406 +2011_002407 +2011_002409 +2011_002410 +2011_002413 +2011_002414 +2011_002418 +2011_002419 +2011_002420 +2011_002421 +2011_002422 +2011_002429 +2011_002433 +2011_002435 +2011_002436 +2011_002443 +2011_002447 +2011_002448 +2011_002453 +2011_002455 +2011_002457 +2011_002458 +2011_002459 +2011_002460 +2011_002461 +2011_002462 +2011_002463 +2011_002464 +2011_002470 +2011_002474 +2011_002476 +2011_002479 +2011_002482 +2011_002484 +2011_002488 +2011_002490 +2011_002491 +2011_002492 +2011_002494 +2011_002495 +2011_002498 +2011_002503 +2011_002504 +2011_002505 +2011_002507 +2011_002509 +2011_002511 +2011_002514 +2011_002515 +2011_002516 +2011_002519 +2011_002520 +2011_002526 +2011_002528 +2011_002531 +2011_002532 +2011_002533 +2011_002535 +2011_002536 +2011_002542 +2011_002543 +2011_002548 +2011_002551 +2011_002552 +2011_002553 +2011_002554 +2011_002555 +2011_002556 +2011_002558 +2011_002559 +2011_002560 +2011_002561 +2011_002566 +2011_002567 +2011_002568 +2011_002571 +2011_002575 +2011_002578 +2011_002579 +2011_002582 +2011_002583 +2011_002584 +2011_002585 +2011_002588 +2011_002589 +2011_002590 +2011_002592 +2011_002594 +2011_002598 +2011_002601 +2011_002605 +2011_002606 +2011_002609 +2011_002610 +2011_002612 +2011_002614 +2011_002616 +2011_002617 +2011_002618 +2011_002620 +2011_002623 +2011_002624 +2011_002629 +2011_002631 +2011_002636 +2011_002638 +2011_002639 +2011_002640 +2011_002641 +2011_002644 +2011_002649 +2011_002650 +2011_002652 +2011_002656 +2011_002657 +2011_002658 +2011_002661 +2011_002662 +2011_002664 +2011_002673 +2011_002674 +2011_002675 +2011_002676 +2011_002677 +2011_002678 +2011_002685 +2011_002687 +2011_002694 +2011_002697 +2011_002699 +2011_002706 +2011_002709 +2011_002713 +2011_002714 +2011_002715 +2011_002717 +2011_002719 +2011_002724 +2011_002725 +2011_002726 +2011_002730 +2011_002738 +2011_002740 +2011_002742 +2011_002746 +2011_002748 +2011_002750 +2011_002751 +2011_002752 +2011_002754 +2011_002756 +2011_002760 +2011_002765 +2011_002766 +2011_002767 +2011_002770 +2011_002772 +2011_002775 +2011_002776 +2011_002779 +2011_002780 +2011_002782 +2011_002784 +2011_002786 
+2011_002790 +2011_002795 +2011_002796 +2011_002798 +2011_002802 +2011_002803 +2011_002805 +2011_002808 +2011_002810 +2011_002811 +2011_002812 +2011_002814 +2011_002817 +2011_002818 +2011_002821 +2011_002823 +2011_002826 +2011_002830 +2011_002831 +2011_002833 +2011_002834 +2011_002838 +2011_002841 +2011_002842 +2011_002851 +2011_002852 +2011_002854 +2011_002863 +2011_002864 +2011_002867 +2011_002868 +2011_002870 +2011_002871 +2011_002872 +2011_002873 +2011_002879 +2011_002880 +2011_002881 +2011_002883 +2011_002884 +2011_002885 +2011_002887 +2011_002889 +2011_002890 +2011_002897 +2011_002900 +2011_002908 +2011_002911 +2011_002912 +2011_002913 +2011_002916 +2011_002917 +2011_002920 +2011_002921 +2011_002924 +2011_002925 +2011_002927 +2011_002929 +2011_002930 +2011_002932 +2011_002933 +2011_002935 +2011_002937 +2011_002938 +2011_002940 +2011_002942 +2011_002943 +2011_002944 +2011_002947 +2011_002949 +2011_002951 +2011_002953 +2011_002956 +2011_002958 +2011_002962 +2011_002965 +2011_002966 +2011_002967 +2011_002969 +2011_002970 +2011_002971 +2011_002974 +2011_002975 +2011_002978 +2011_002979 +2011_002983 +2011_002985 +2011_002987 +2011_002988 +2011_002992 +2011_002993 +2011_002994 +2011_002997 +2011_002999 +2011_003002 +2011_003003 +2011_003005 +2011_003010 +2011_003011 +2011_003012 +2011_003013 +2011_003016 +2011_003019 +2011_003020 +2011_003023 +2011_003025 +2011_003027 +2011_003028 +2011_003029 +2011_003030 +2011_003034 +2011_003038 +2011_003039 +2011_003041 +2011_003043 +2011_003044 +2011_003047 +2011_003048 +2011_003049 +2011_003050 +2011_003054 +2011_003055 +2011_003057 +2011_003059 +2011_003063 +2011_003065 +2011_003066 +2011_003073 +2011_003074 +2011_003076 +2011_003078 +2011_003079 +2011_003081 +2011_003085 +2011_003086 +2011_003089 +2011_003091 +2011_003097 +2011_003098 +2011_003103 +2011_003109 +2011_003111 +2011_003114 +2011_003115 +2011_003121 +2011_003124 +2011_003132 +2011_003134 +2011_003138 +2011_003141 +2011_003145 +2011_003146 +2011_003148 +2011_003149 +2011_003150 +2011_003151 +2011_003152 +2011_003154 +2011_003158 +2011_003159 +2011_003162 +2011_003163 +2011_003166 +2011_003167 +2011_003168 +2011_003169 +2011_003171 +2011_003176 +2011_003177 +2011_003182 +2011_003183 +2011_003184 +2011_003185 +2011_003187 +2011_003188 +2011_003192 +2011_003194 +2011_003197 +2011_003201 +2011_003205 +2011_003207 +2011_003211 +2011_003212 +2011_003213 +2011_003216 +2011_003220 +2011_003223 +2011_003228 +2011_003230 +2011_003232 +2011_003236 +2011_003238 +2011_003240 +2011_003242 +2011_003244 +2011_003246 +2011_003247 +2011_003253 +2011_003254 +2011_003255 +2011_003256 +2011_003259 +2011_003260 +2011_003261 +2011_003262 +2011_003269 +2011_003271 +2011_003274 +2011_003275 +2011_003276 diff --git a/ImageSets/Main/val.txt b/ImageSets/Main/val.txt new file mode 100644 index 0000000..0349a31 --- /dev/null +++ b/ImageSets/Main/val.txt @@ -0,0 +1,8333 @@ +000005 +000007 +000009 +000016 +000019 +000020 +000021 +000024 +000030 +000039 +000041 +000046 +000050 +000051 +000052 +000060 +000063 +000065 +000072 +000081 +000093 +000095 +000099 +000101 +000102 +000107 +000109 +000110 +000113 +000117 +000118 +000120 +000121 +000123 +000125 +000130 +000131 +000132 +000142 +000143 +000146 +000150 +000156 +000158 +000165 +000169 +000170 +000177 +000180 +000184 +000190 +000203 +000208 +000210 +000211 +000214 +000215 +000218 +000221 +000224 +000229 +000232 +000233 +000236 +000241 +000244 +000245 +000246 +000249 +000251 +000257 +000266 +000268 +000269 +000270 +000275 +000285 +000289 +000298 +000302 +000303 +000304 
+000305 +000308 +000318 +000321 +000322 +000323 +000328 +000329 +000332 +000336 +000338 +000340 +000343 +000352 +000354 +000363 +000373 +000374 +000380 +000381 +000396 +000403 +000408 +000417 +000419 +000420 +000424 +000427 +000428 +000433 +000435 +000439 +000443 +000448 +000459 +000460 +000461 +000462 +000464 +000480 +000482 +000483 +000486 +000491 +000492 +000494 +000498 +000499 +000500 +000501 +000509 +000513 +000514 +000515 +000520 +000523 +000530 +000531 +000540 +000543 +000545 +000563 +000564 +000579 +000581 +000582 +000588 +000591 +000598 +000599 +000601 +000608 +000610 +000613 +000619 +000626 +000628 +000637 +000645 +000647 +000653 +000656 +000660 +000661 +000663 +000667 +000675 +000676 +000677 +000682 +000684 +000686 +000690 +000694 +000702 +000705 +000707 +000712 +000713 +000714 +000717 +000720 +000728 +000730 +000738 +000742 +000746 +000748 +000750 +000752 +000755 +000756 +000760 +000763 +000771 +000772 +000776 +000777 +000780 +000782 +000786 +000787 +000791 +000794 +000797 +000799 +000800 +000802 +000806 +000808 +000814 +000815 +000816 +000826 +000831 +000832 +000834 +000842 +000843 +000847 +000848 +000854 +000855 +000857 +000862 +000863 +000868 +000872 +000874 +000876 +000878 +000879 +000880 +000882 +000885 +000895 +000896 +000903 +000911 +000917 +000918 +000920 +000921 +000923 +000926 +000931 +000934 +000935 +000937 +000946 +000947 +000948 +000949 +000971 +000972 +000973 +000982 +001004 +001009 +001012 +001017 +001018 +001027 +001028 +001041 +001042 +001045 +001052 +001053 +001056 +001061 +001062 +001066 +001069 +001072 +001074 +001083 +001084 +001091 +001092 +001093 +001097 +001102 +001104 +001107 +001109 +001110 +001121 +001124 +001125 +001136 +001137 +001142 +001143 +001144 +001145 +001148 +001149 +001154 +001160 +001161 +001164 +001166 +001170 +001175 +001176 +001184 +001185 +001186 +001187 +001192 +001199 +001200 +001201 +001203 +001206 +001211 +001215 +001221 +001224 +001225 +001231 +001233 +001236 +001241 +001247 +001250 +001254 +001259 +001260 +001265 +001266 +001272 +001274 +001277 +001281 +001284 +001286 +001288 +001289 +001290 +001292 +001293 +001298 +001310 +001311 +001316 +001324 +001330 +001337 +001341 +001343 +001350 +001352 +001360 +001361 +001362 +001371 +001375 +001383 +001386 +001387 +001397 +001400 +001413 +001430 +001432 +001439 +001441 +001443 +001444 +001445 +001460 +001463 +001464 +001465 +001466 +001467 +001472 +001475 +001481 +001484 +001490 +001493 +001497 +001509 +001510 +001514 +001522 +001523 +001531 +001536 +001537 +001541 +001543 +001544 +001545 +001553 +001554 +001561 +001565 +001571 +001577 +001582 +001588 +001595 +001598 +001603 +001608 +001614 +001617 +001618 +001628 +001632 +001638 +001640 +001642 +001647 +001653 +001675 +001677 +001678 +001682 +001685 +001686 +001689 +001691 +001693 +001718 +001724 +001725 +001726 +001727 +001730 +001746 +001747 +001749 +001755 +001756 +001771 +001772 +001775 +001778 +001782 +001784 +001785 +001793 +001795 +001797 +001799 +001801 +001807 +001816 +001818 +001827 +001830 +001833 +001837 +001842 +001847 +001849 +001855 +001860 +001862 +001872 +001875 +001877 +001878 +001882 +001887 +001888 +001899 +001901 +001907 +001911 +001918 +001920 +001927 +001931 +001932 +001933 +001934 +001936 +001940 +001944 +001948 +001958 +001962 +001964 +001970 +001972 +001976 +001982 +002000 +002011 +002019 +002021 +002022 +002023 +002024 +002030 +002036 +002045 +002054 +002058 +002063 +002064 +002067 +002070 +002082 +002083 +002086 +002088 +002090 +002091 +002094 +002098 +002099 +002101 +002102 +002109 +002112 +002114 +002124 
+002125 +002129 +002135 +002136 +002140 +002142 +002145 +002146 +002152 +002163 +002165 +002169 +002171 +002174 +002181 +002183 +002184 +002190 +002201 +002202 +002209 +002213 +002214 +002218 +002220 +002226 +002228 +002233 +002244 +002248 +002251 +002257 +002259 +002261 +002263 +002266 +002267 +002268 +002270 +002272 +002273 +002276 +002278 +002281 +002285 +002288 +002290 +002300 +002302 +002305 +002308 +002324 +002328 +002329 +002330 +002332 +002333 +002337 +002340 +002343 +002345 +002348 +002352 +002361 +002364 +002366 +002367 +002369 +002371 +002372 +002374 +002375 +002376 +002377 +002378 +002382 +002385 +002387 +002391 +002393 +002404 +002407 +002415 +002417 +002425 +002427 +002435 +002437 +002441 +002444 +002450 +002452 +002454 +002456 +002459 +002460 +002462 +002470 +002476 +002477 +002479 +002491 +002492 +002493 +002497 +002504 +002505 +002508 +002513 +002520 +002523 +002524 +002525 +002529 +002537 +002540 +002542 +002546 +002549 +002561 +002563 +002565 +002566 +002567 +002578 +002584 +002585 +002586 +002589 +002593 +002598 +002600 +002605 +002606 +002613 +002615 +002618 +002621 +002632 +002633 +002636 +002637 +002641 +002643 +002646 +002649 +002657 +002658 +002659 +002667 +002668 +002670 +002675 +002677 +002678 +002689 +002690 +002693 +002695 +002696 +002699 +002706 +002709 +002714 +002717 +002718 +002721 +002723 +002727 +002732 +002734 +002741 +002747 +002751 +002760 +002762 +002767 +002772 +002775 +002776 +002784 +002785 +002786 +002794 +002798 +002800 +002803 +002810 +002812 +002815 +002827 +002833 +002835 +002836 +002838 +002842 +002847 +002854 +002859 +002875 +002879 +002880 +002884 +002886 +002889 +002891 +002893 +002896 +002901 +002910 +002912 +002913 +002915 +002916 +002917 +002924 +002932 +002933 +002935 +002938 +002940 +002941 +002942 +002943 +002944 +002946 +002947 +002952 +002954 +002960 +002963 +002965 +002966 +002967 +002977 +002978 +002984 +002986 +002994 +003000 +003004 +003005 +003008 +003009 +003015 +003017 +003021 +003023 +003028 +003031 +003032 +003038 +003039 +003044 +003045 +003054 +003056 +003057 +003058 +003064 +003065 +003072 +003078 +003082 +003086 +003089 +003090 +003093 +003094 +003098 +003102 +003112 +003117 +003118 +003120 +003121 +003126 +003127 +003129 +003137 +003142 +003154 +003162 +003164 +003170 +003176 +003177 +003178 +003186 +003189 +003194 +003195 +003199 +003200 +003207 +003210 +003213 +003216 +003218 +003219 +003223 +003228 +003239 +003243 +003250 +003255 +003256 +003258 +003262 +003271 +003272 +003274 +003285 +003293 +003294 +003296 +003299 +003300 +003301 +003307 +003311 +003313 +003316 +003325 +003327 +003335 +003344 +003351 +003360 +003362 +003370 +003376 +003377 +003386 +003390 +003391 +003397 +003398 +003403 +003404 +003407 +003410 +003415 +003419 +003422 +003425 +003429 +003435 +003443 +003444 +003449 +003451 +003453 +003455 +003458 +003461 +003462 +003464 +003465 +003468 +003469 +003470 +003492 +003516 +003518 +003519 +003521 +003528 +003530 +003536 +003537 +003546 +003554 +003556 +003566 +003567 +003580 +003587 +003589 +003593 +003594 +003597 +003606 +003611 +003618 +003620 +003623 +003632 +003636 +003638 +003639 +003640 +003648 +003651 +003654 +003655 +003657 +003660 +003667 +003669 +003673 +003674 +003675 +003684 +003685 +003690 +003691 +003696 +003703 +003706 +003708 +003709 +003711 +003717 +003721 +003722 +003727 +003729 +003750 +003753 +003754 +003760 +003772 +003774 +003780 +003783 +003791 +003793 +003796 +003798 +003803 +003808 +003809 +003814 +003820 +003821 +003826 +003837 +003838 +003844 +003845 +003846 +003848 +003855 
+003857 +003863 +003868 +003869 +003871 +003872 +003876 +003877 +003885 +003886 +003891 +003895 +003905 +003911 +003915 +003918 +003919 +003923 +003924 +003926 +003937 +003941 +003946 +003947 +003948 +003954 +003957 +003960 +003963 +003965 +003966 +003973 +003979 +003984 +003986 +003990 +003992 +003994 +003996 +004003 +004010 +004011 +004015 +004020 +004025 +004031 +004039 +004047 +004051 +004057 +004060 +004066 +004069 +004073 +004075 +004076 +004077 +004082 +004085 +004087 +004089 +004102 +004105 +004108 +004110 +004113 +004117 +004122 +004135 +004141 +004142 +004143 +004145 +004148 +004150 +004174 +004178 +004185 +004186 +004191 +004192 +004193 +004194 +004195 +004203 +004204 +004205 +004212 +004229 +004230 +004239 +004246 +004257 +004258 +004259 +004264 +004265 +004274 +004275 +004279 +004284 +004286 +004293 +004295 +004298 +004304 +004310 +004312 +004321 +004323 +004326 +004329 +004331 +004341 +004346 +004349 +004351 +004352 +004354 +004356 +004364 +004368 +004369 +004380 +004384 +004390 +004396 +004397 +004405 +004409 +004411 +004421 +004423 +004424 +004429 +004430 +004432 +004433 +004437 +004438 +004446 +004450 +004455 +004457 +004459 +004463 +004464 +004466 +004468 +004474 +004487 +004488 +004490 +004493 +004494 +004495 +004498 +004499 +004507 +004509 +004512 +004518 +004527 +004528 +004530 +004532 +004535 +004539 +004542 +004552 +004555 +004558 +004574 +004581 +004585 +004588 +004592 +004600 +004601 +004606 +004609 +004618 +004626 +004630 +004632 +004647 +004649 +004652 +004653 +004654 +004655 +004660 +004662 +004672 +004673 +004674 +004676 +004682 +004689 +004692 +004699 +004707 +004708 +004719 +004722 +004727 +004732 +004746 +004750 +004761 +004768 +004770 +004777 +004785 +004786 +004788 +004789 +004796 +004805 +004812 +004814 +004816 +004818 +004825 +004826 +004831 +004834 +004839 +004840 +004850 +004852 +004856 +004859 +004863 +004866 +004867 +004868 +004872 +004878 +004886 +004890 +004895 +004896 +004903 +004912 +004916 +004926 +004928 +004931 +004935 +004936 +004938 +004939 +004943 +004948 +004950 +004953 +004954 +004956 +004960 +004963 +004967 +004977 +004982 +004983 +004985 +004986 +004994 +004997 +004998 +004999 +005003 +005014 +005028 +005036 +005037 +005039 +005042 +005054 +005055 +005056 +005062 +005063 +005064 +005067 +005072 +005077 +005079 +005081 +005085 +005102 +005104 +005110 +005111 +005116 +005128 +005131 +005135 +005136 +005144 +005145 +005146 +005150 +005159 +005160 +005161 +005175 +005176 +005179 +005185 +005195 +005199 +005209 +005210 +005212 +005214 +005220 +005222 +005224 +005229 +005230 +005239 +005242 +005248 +005253 +005254 +005263 +005264 +005267 +005268 +005270 +005274 +005278 +005281 +005293 +005298 +005305 +005306 +005312 +005314 +005315 +005319 +005320 +005325 +005326 +005328 +005331 +005340 +005343 +005346 +005348 +005349 +005350 +005352 +005355 +005365 +005367 +005370 +005371 +005378 +005379 +005380 +005383 +005384 +005385 +005393 +005395 +005397 +005398 +005407 +005416 +005418 +005419 +005421 +005423 +005429 +005430 +005431 +005434 +005436 +005438 +005439 +005441 +005454 +005461 +005465 +005469 +005470 +005471 +005475 +005481 +005485 +005486 +005497 +005507 +005510 +005517 +005518 +005521 +005522 +005530 +005531 +005535 +005539 +005549 +005550 +005552 +005554 +005559 +005573 +005576 +005577 +005583 +005584 +005586 +005588 +005590 +005593 +005606 +005608 +005613 +005614 +005615 +005618 +005620 +005629 +005640 +005641 +005645 +005647 +005652 +005653 +005655 +005657 +005660 +005662 +005664 +005672 +005674 +005676 +005679 +005682 +005685 +005687 
+005693 +005696 +005701 +005702 +005714 +005716 +005719 +005723 +005729 +005732 +005736 +005741 +005743 +005747 +005749 +005755 +005760 +005761 +005762 +005768 +005773 +005779 +005781 +005788 +005790 +005791 +005794 +005799 +005811 +005812 +005815 +005818 +005819 +005825 +005828 +005829 +005830 +005839 +005841 +005845 +005852 +005853 +005854 +005856 +005863 +005868 +005874 +005875 +005877 +005878 +005879 +005894 +005897 +005906 +005912 +005914 +005917 +005919 +005928 +005940 +005952 +005954 +005956 +005963 +005968 +005970 +005975 +005979 +005981 +005985 +005988 +005989 +005991 +005995 +005996 +005998 +006000 +006001 +006005 +006012 +006018 +006026 +006027 +006028 +006029 +006035 +006041 +006042 +006045 +006046 +006055 +006058 +006062 +006069 +006071 +006084 +006089 +006097 +006098 +006107 +006108 +006111 +006117 +006120 +006124 +006125 +006129 +006133 +006136 +006139 +006146 +006148 +006150 +006151 +006153 +006159 +006161 +006163 +006184 +006185 +006188 +006190 +006198 +006201 +006202 +006203 +006206 +006209 +006212 +006214 +006215 +006216 +006218 +006219 +006220 +006222 +006233 +006234 +006235 +006240 +006241 +006249 +006252 +006254 +006258 +006259 +006260 +006269 +006276 +006277 +006281 +006282 +006284 +006286 +006295 +006296 +006300 +006301 +006306 +006309 +006314 +006318 +006319 +006321 +006323 +006325 +006330 +006335 +006337 +006338 +006339 +006346 +006348 +006350 +006351 +006355 +006357 +006377 +006385 +006387 +006391 +006392 +006396 +006398 +006404 +006409 +006421 +006424 +006425 +006428 +006430 +006437 +006440 +006443 +006444 +006445 +006449 +006450 +006456 +006463 +006465 +006468 +006473 +006480 +006484 +006488 +006492 +006497 +006507 +006509 +006512 +006519 +006520 +006529 +006530 +006532 +006534 +006538 +006542 +006543 +006553 +006562 +006565 +006570 +006572 +006575 +006576 +006578 +006583 +006584 +006585 +006587 +006588 +006593 +006599 +006603 +006606 +006611 +006617 +006618 +006619 +006621 +006625 +006628 +006631 +006632 +006643 +006645 +006647 +006657 +006661 +006664 +006666 +006667 +006668 +006670 +006671 +006673 +006677 +006678 +006679 +006681 +006682 +006687 +006690 +006696 +006699 +006702 +006709 +006718 +006719 +006722 +006725 +006730 +006739 +006747 +006751 +006759 +006760 +006761 +006762 +006765 +006768 +006769 +006772 +006783 +006786 +006789 +006797 +006799 +006800 +006802 +006803 +006808 +006813 +006814 +006819 +006821 +006827 +006828 +006829 +006835 +006838 +006841 +006842 +006850 +006855 +006859 +006860 +006862 +006865 +006867 +006876 +006878 +006880 +006884 +006886 +006892 +006903 +006908 +006918 +006922 +006924 +006932 +006933 +006934 +006935 +006940 +006944 +006945 +006949 +006952 +006953 +006956 +006962 +006963 +006965 +006966 +006972 +006981 +006987 +006988 +006989 +006990 +006994 +006995 +007004 +007008 +007009 +007020 +007021 +007022 +007031 +007035 +007038 +007042 +007046 +007048 +007049 +007052 +007054 +007056 +007058 +007059 +007065 +007068 +007070 +007071 +007074 +007077 +007084 +007086 +007097 +007100 +007101 +007104 +007109 +007114 +007117 +007122 +007123 +007132 +007139 +007140 +007141 +007144 +007146 +007147 +007148 +007149 +007153 +007162 +007165 +007167 +007172 +007174 +007187 +007189 +007191 +007200 +007204 +007208 +007210 +007211 +007212 +007215 +007216 +007217 +007224 +007227 +007230 +007236 +007244 +007245 +007247 +007249 +007258 +007259 +007260 +007266 +007270 +007274 +007275 +007276 +007280 +007283 +007284 +007292 +007294 +007296 +007297 +007299 +007300 +007302 +007311 +007314 +007318 +007329 +007330 +007343 +007344 +007346 +007350 +007356 
+007359 +007363 +007372 +007374 +007376 +007383 +007388 +007390 +007408 +007414 +007416 +007422 +007424 +007427 +007432 +007433 +007435 +007436 +007438 +007439 +007443 +007445 +007448 +007449 +007451 +007457 +007460 +007461 +007465 +007470 +007475 +007480 +007482 +007484 +007486 +007489 +007498 +007506 +007511 +007517 +007523 +007525 +007527 +007528 +007533 +007537 +007543 +007546 +007547 +007551 +007555 +007559 +007563 +007568 +007571 +007576 +007579 +007585 +007592 +007603 +007605 +007612 +007614 +007615 +007618 +007622 +007624 +007626 +007639 +007640 +007642 +007647 +007649 +007650 +007656 +007657 +007662 +007664 +007666 +007668 +007670 +007671 +007672 +007673 +007675 +007677 +007678 +007679 +007680 +007682 +007687 +007688 +007691 +007694 +007702 +007705 +007709 +007712 +007715 +007720 +007723 +007724 +007727 +007732 +007742 +007743 +007745 +007746 +007754 +007758 +007760 +007763 +007765 +007768 +007772 +007773 +007776 +007779 +007786 +007793 +007798 +007799 +007812 +007813 +007815 +007824 +007826 +007833 +007834 +007841 +007843 +007845 +007855 +007856 +007857 +007865 +007868 +007869 +007873 +007886 +007889 +007890 +007897 +007899 +007902 +007909 +007916 +007919 +007920 +007921 +007924 +007928 +007931 +007933 +007935 +007943 +007946 +007947 +007950 +007954 +007956 +007958 +007970 +007971 +007979 +007984 +007987 +007997 +007998 +007999 +008002 +008009 +008023 +008024 +008029 +008031 +008032 +008033 +008036 +008048 +008057 +008060 +008061 +008068 +008069 +008085 +008086 +008087 +008091 +008100 +008101 +008103 +008105 +008107 +008112 +008115 +008122 +008125 +008132 +008138 +008140 +008141 +008144 +008151 +008159 +008160 +008168 +008171 +008173 +008175 +008177 +008180 +008189 +008190 +008191 +008200 +008208 +008209 +008220 +008222 +008224 +008225 +008229 +008236 +008241 +008244 +008251 +008258 +008268 +008275 +008279 +008281 +008284 +008285 +008292 +008293 +008294 +008295 +008297 +008299 +008300 +008306 +008307 +008318 +008319 +008320 +008323 +008326 +008327 +008329 +008335 +008345 +008349 +008355 +008359 +008364 +008365 +008368 +008370 +008376 +008386 +008387 +008390 +008410 +008413 +008415 +008416 +008423 +008424 +008429 +008430 +008433 +008434 +008438 +008444 +008450 +008454 +008461 +008472 +008484 +008485 +008492 +008494 +008498 +008499 +008502 +008503 +008509 +008512 +008513 +008514 +008518 +008519 +008521 +008522 +008524 +008526 +008534 +008535 +008541 +008542 +008553 +008556 +008557 +008562 +008564 +008572 +008573 +008576 +008582 +008584 +008586 +008592 +008601 +008604 +008606 +008607 +008608 +008612 +008620 +008621 +008624 +008635 +008636 +008638 +008639 +008644 +008647 +008653 +008654 +008667 +008680 +008683 +008687 +008692 +008695 +008698 +008701 +008709 +008713 +008716 +008717 +008718 +008722 +008728 +008730 +008733 +008739 +008742 +008747 +008749 +008752 +008753 +008759 +008766 +008769 +008772 +008773 +008775 +008793 +008796 +008799 +008801 +008805 +008810 +008817 +008822 +008823 +008826 +008831 +008833 +008835 +008836 +008837 +008843 +008848 +008849 +008854 +008858 +008859 +008867 +008871 +008873 +008874 +008876 +008880 +008884 +008888 +008890 +008892 +008911 +008913 +008914 +008917 +008919 +008921 +008927 +008931 +008940 +008942 +008943 +008951 +008953 +008955 +008965 +008976 +008982 +008983 +008997 +009002 +009006 +009007 +009015 +009019 +009022 +009024 +009034 +009035 +009037 +009039 +009048 +009051 +009053 +009060 +009064 +009072 +009079 +009085 +009087 +009089 +009091 +009094 +009105 +009112 +009113 +009116 +009126 +009128 +009129 +009131 +009133 +009138 +009141 +009147 
+009150 +009151 +009155 +009157 +009159 +009162 +009163 +009168 +009174 +009177 +009178 +009179 +009180 +009186 +009187 +009189 +009192 +009193 +009194 +009195 +009202 +009212 +009213 +009221 +009224 +009236 +009239 +009244 +009246 +009247 +009249 +009250 +009254 +009268 +009273 +009278 +009279 +009281 +009282 +009286 +009291 +009303 +009309 +009312 +009315 +009323 +009326 +009330 +009331 +009334 +009337 +009347 +009348 +009349 +009350 +009351 +009354 +009368 +009371 +009373 +009374 +009375 +009378 +009382 +009401 +009405 +009408 +009412 +009414 +009433 +009437 +009438 +009439 +009440 +009443 +009445 +009448 +009454 +009455 +009456 +009457 +009459 +009461 +009464 +009468 +009470 +009472 +009477 +009479 +009480 +009481 +009484 +009494 +009500 +009502 +009507 +009517 +009519 +009527 +009531 +009532 +009533 +009540 +009543 +009546 +009550 +009558 +009560 +009565 +009567 +009568 +009571 +009580 +009586 +009588 +009591 +009597 +009598 +009603 +009611 +009617 +009619 +009620 +009627 +009636 +009641 +009647 +009649 +009655 +009658 +009667 +009670 +009676 +009678 +009681 +009685 +009686 +009687 +009692 +009695 +009698 +009699 +009700 +009706 +009710 +009711 +009712 +009719 +009724 +009726 +009732 +009737 +009738 +009743 +009745 +009746 +009747 +009748 +009754 +009758 +009761 +009764 +009767 +009772 +009773 +009778 +009780 +009781 +009785 +009794 +009796 +009801 +009809 +009816 +009819 +009822 +009823 +009831 +009833 +009836 +009841 +009858 +009862 +009863 +009865 +009870 +009880 +009881 +009886 +009894 +009897 +009898 +009900 +009902 +009905 +009908 +009913 +009917 +009923 +009932 +009935 +009939 +009946 +009947 +009950 +009954 +009955 +009958 +2008_000002 +2008_000003 +2008_000007 +2008_000009 +2008_000016 +2008_000021 +2008_000026 +2008_000027 +2008_000032 +2008_000034 +2008_000042 +2008_000043 +2008_000050 +2008_000051 +2008_000052 +2008_000054 +2008_000056 +2008_000059 +2008_000062 +2008_000064 +2008_000067 +2008_000073 +2008_000075 +2008_000076 +2008_000078 +2008_000080 +2008_000082 +2008_000084 +2008_000090 +2008_000107 +2008_000115 +2008_000116 +2008_000119 +2008_000120 +2008_000123 +2008_000133 +2008_000134 +2008_000138 +2008_000140 +2008_000145 +2008_000149 +2008_000163 +2008_000174 +2008_000177 +2008_000182 +2008_000183 +2008_000190 +2008_000194 +2008_000195 +2008_000203 +2008_000204 +2008_000213 +2008_000215 +2008_000219 +2008_000222 +2008_000223 +2008_000233 +2008_000234 +2008_000239 +2008_000243 +2008_000244 +2008_000246 +2008_000251 +2008_000253 +2008_000254 +2008_000257 +2008_000261 +2008_000264 +2008_000268 +2008_000270 +2008_000271 +2008_000272 +2008_000274 +2008_000277 +2008_000278 +2008_000281 +2008_000298 +2008_000304 +2008_000305 +2008_000306 +2008_000307 +2008_000321 +2008_000328 +2008_000339 +2008_000340 +2008_000345 +2008_000354 +2008_000358 +2008_000359 +2008_000367 +2008_000373 +2008_000376 +2008_000378 +2008_000381 +2008_000382 +2008_000383 +2008_000391 +2008_000398 +2008_000401 +2008_000403 +2008_000406 +2008_000407 +2008_000408 +2008_000413 +2008_000414 +2008_000418 +2008_000419 +2008_000423 +2008_000424 +2008_000446 +2008_000452 +2008_000457 +2008_000464 +2008_000465 +2008_000466 +2008_000469 +2008_000472 +2008_000473 +2008_000474 +2008_000475 +2008_000481 +2008_000489 +2008_000492 +2008_000496 +2008_000498 +2008_000501 +2008_000510 +2008_000511 +2008_000516 +2008_000519 +2008_000522 +2008_000532 +2008_000533 +2008_000535 +2008_000536 +2008_000541 +2008_000547 +2008_000553 +2008_000558 +2008_000562 +2008_000564 +2008_000566 +2008_000568 +2008_000569 +2008_000573 
+2008_000579 +2008_000581 +2008_000589 +2008_000599 +2008_000602 +2008_000605 +2008_000609 +2008_000614 +2008_000620 +2008_000622 +2008_000623 +2008_000629 +2008_000630 +2008_000634 +2008_000640 +2008_000647 +2008_000652 +2008_000656 +2008_000657 +2008_000659 +2008_000660 +2008_000661 +2008_000662 +2008_000666 +2008_000670 +2008_000673 +2008_000677 +2008_000690 +2008_000691 +2008_000695 +2008_000697 +2008_000699 +2008_000700 +2008_000705 +2008_000706 +2008_000714 +2008_000725 +2008_000727 +2008_000731 +2008_000734 +2008_000737 +2008_000740 +2008_000745 +2008_000748 +2008_000765 +2008_000769 +2008_000776 +2008_000780 +2008_000782 +2008_000783 +2008_000788 +2008_000793 +2008_000795 +2008_000796 +2008_000803 +2008_000804 +2008_000805 +2008_000806 +2008_000811 +2008_000817 +2008_000825 +2008_000828 +2008_000834 +2008_000835 +2008_000837 +2008_000839 +2008_000848 +2008_000853 +2008_000857 +2008_000858 +2008_000863 +2008_000864 +2008_000868 +2008_000876 +2008_000878 +2008_000880 +2008_000884 +2008_000885 +2008_000897 +2008_000904 +2008_000910 +2008_000911 +2008_000916 +2008_000917 +2008_000919 +2008_000922 +2008_000931 +2008_000936 +2008_000939 +2008_000940 +2008_000942 +2008_000943 +2008_000950 +2008_000952 +2008_000956 +2008_000957 +2008_000960 +2008_000964 +2008_000965 +2008_000971 +2008_000972 +2008_000976 +2008_000982 +2008_000984 +2008_000992 +2008_000993 +2008_001004 +2008_001007 +2008_001009 +2008_001012 +2008_001013 +2008_001024 +2008_001028 +2008_001034 +2008_001040 +2008_001041 +2008_001046 +2008_001055 +2008_001060 +2008_001062 +2008_001063 +2008_001066 +2008_001068 +2008_001070 +2008_001074 +2008_001075 +2008_001076 +2008_001077 +2008_001078 +2008_001080 +2008_001089 +2008_001090 +2008_001092 +2008_001098 +2008_001099 +2008_001111 +2008_001113 +2008_001114 +2008_001120 +2008_001121 +2008_001122 +2008_001135 +2008_001136 +2008_001139 +2008_001140 +2008_001142 +2008_001150 +2008_001154 +2008_001155 +2008_001160 +2008_001166 +2008_001167 +2008_001168 +2008_001170 +2008_001177 +2008_001183 +2008_001185 +2008_001192 +2008_001194 +2008_001199 +2008_001205 +2008_001210 +2008_001218 +2008_001220 +2008_001221 +2008_001225 +2008_001226 +2008_001227 +2008_001231 +2008_001236 +2008_001241 +2008_001248 +2008_001249 +2008_001255 +2008_001257 +2008_001260 +2008_001262 +2008_001264 +2008_001271 +2008_001275 +2008_001283 +2008_001284 +2008_001290 +2008_001296 +2008_001301 +2008_001304 +2008_001306 +2008_001308 +2008_001314 +2008_001318 +2008_001320 +2008_001322 +2008_001333 +2008_001334 +2008_001338 +2008_001340 +2008_001344 +2008_001349 +2008_001350 +2008_001353 +2008_001356 +2008_001366 +2008_001367 +2008_001369 +2008_001374 +2008_001376 +2008_001379 +2008_001380 +2008_001388 +2008_001391 +2008_001395 +2008_001401 +2008_001404 +2008_001406 +2008_001410 +2008_001415 +2008_001427 +2008_001428 +2008_001429 +2008_001430 +2008_001432 +2008_001433 +2008_001436 +2008_001437 +2008_001439 +2008_001445 +2008_001451 +2008_001456 +2008_001466 +2008_001468 +2008_001470 +2008_001475 +2008_001478 +2008_001481 +2008_001486 +2008_001491 +2008_001494 +2008_001503 +2008_001504 +2008_001513 +2008_001514 +2008_001516 +2008_001520 +2008_001522 +2008_001527 +2008_001531 +2008_001534 +2008_001536 +2008_001539 +2008_001540 +2008_001542 +2008_001543 +2008_001544 +2008_001546 +2008_001547 +2008_001549 +2008_001551 +2008_001553 +2008_001564 +2008_001574 +2008_001575 +2008_001580 +2008_001586 +2008_001589 +2008_001590 +2008_001593 +2008_001594 +2008_001596 +2008_001598 +2008_001602 +2008_001605 +2008_001607 +2008_001613 
+2008_001619 +2008_001622 +2008_001624 +2008_001625 +2008_001629 +2008_001636 +2008_001638 +2008_001640 +2008_001648 +2008_001649 +2008_001655 +2008_001659 +2008_001660 +2008_001663 +2008_001667 +2008_001668 +2008_001669 +2008_001676 +2008_001680 +2008_001681 +2008_001682 +2008_001688 +2008_001697 +2008_001702 +2008_001712 +2008_001714 +2008_001715 +2008_001717 +2008_001722 +2008_001723 +2008_001727 +2008_001730 +2008_001731 +2008_001736 +2008_001742 +2008_001745 +2008_001750 +2008_001757 +2008_001763 +2008_001764 +2008_001765 +2008_001769 +2008_001772 +2008_001773 +2008_001774 +2008_001782 +2008_001784 +2008_001792 +2008_001799 +2008_001802 +2008_001805 +2008_001806 +2008_001808 +2008_001810 +2008_001812 +2008_001814 +2008_001815 +2008_001816 +2008_001820 +2008_001821 +2008_001823 +2008_001825 +2008_001830 +2008_001838 +2008_001841 +2008_001843 +2008_001850 +2008_001858 +2008_001862 +2008_001863 +2008_001867 +2008_001869 +2008_001871 +2008_001874 +2008_001885 +2008_001895 +2008_001899 +2008_001905 +2008_001907 +2008_001908 +2008_001909 +2008_001910 +2008_001914 +2008_001919 +2008_001920 +2008_001928 +2008_001930 +2008_001932 +2008_001934 +2008_001945 +2008_001946 +2008_001951 +2008_001958 +2008_001961 +2008_001965 +2008_001966 +2008_001969 +2008_001971 +2008_001978 +2008_001979 +2008_001985 +2008_001987 +2008_001989 +2008_001992 +2008_001998 +2008_002003 +2008_002004 +2008_002007 +2008_002011 +2008_002013 +2008_002017 +2008_002021 +2008_002026 +2008_002031 +2008_002035 +2008_002036 +2008_002037 +2008_002039 +2008_002042 +2008_002043 +2008_002045 +2008_002046 +2008_002047 +2008_002052 +2008_002069 +2008_002071 +2008_002082 +2008_002084 +2008_002086 +2008_002088 +2008_002092 +2008_002098 +2008_002099 +2008_002107 +2008_002113 +2008_002114 +2008_002115 +2008_002124 +2008_002132 +2008_002138 +2008_002140 +2008_002144 +2008_002146 +2008_002151 +2008_002152 +2008_002153 +2008_002155 +2008_002158 +2008_002167 +2008_002169 +2008_002172 +2008_002176 +2008_002179 +2008_002185 +2008_002191 +2008_002193 +2008_002194 +2008_002198 +2008_002199 +2008_002201 +2008_002205 +2008_002207 +2008_002209 +2008_002212 +2008_002222 +2008_002223 +2008_002231 +2008_002234 +2008_002239 +2008_002240 +2008_002241 +2008_002250 +2008_002259 +2008_002267 +2008_002269 +2008_002272 +2008_002273 +2008_002283 +2008_002292 +2008_002293 +2008_002298 +2008_002305 +2008_002312 +2008_002314 +2008_002321 +2008_002322 +2008_002324 +2008_002328 +2008_002330 +2008_002347 +2008_002349 +2008_002356 +2008_002358 +2008_002359 +2008_002366 +2008_002372 +2008_002374 +2008_002378 +2008_002379 +2008_002383 +2008_002384 +2008_002395 +2008_002401 +2008_002403 +2008_002404 +2008_002408 +2008_002410 +2008_002412 +2008_002414 +2008_002419 +2008_002424 +2008_002428 +2008_002429 +2008_002430 +2008_002436 +2008_002438 +2008_002439 +2008_002444 +2008_002445 +2008_002446 +2008_002451 +2008_002452 +2008_002454 +2008_002456 +2008_002464 +2008_002467 +2008_002470 +2008_002477 +2008_002481 +2008_002483 +2008_002485 +2008_002492 +2008_002494 +2008_002495 +2008_002499 +2008_002502 +2008_002504 +2008_002508 +2008_002509 +2008_002510 +2008_002512 +2008_002516 +2008_002521 +2008_002523 +2008_002526 +2008_002527 +2008_002536 +2008_002540 +2008_002542 +2008_002558 +2008_002564 +2008_002567 +2008_002576 +2008_002579 +2008_002588 +2008_002589 +2008_002590 +2008_002597 +2008_002598 +2008_002599 +2008_002603 +2008_002606 +2008_002610 +2008_002616 +2008_002623 +2008_002624 +2008_002631 +2008_002639 +2008_002640 +2008_002643 +2008_002645 +2008_002652 +2008_002653 
+2008_002670 +2008_002673 +2008_002677 +2008_002678 +2008_002680 +2008_002681 +2008_002682 +2008_002684 +2008_002687 +2008_002696 +2008_002700 +2008_002701 +2008_002705 +2008_002709 +2008_002714 +2008_002715 +2008_002716 +2008_002720 +2008_002725 +2008_002732 +2008_002735 +2008_002738 +2008_002746 +2008_002751 +2008_002752 +2008_002753 +2008_002756 +2008_002766 +2008_002768 +2008_002773 +2008_002775 +2008_002778 +2008_002783 +2008_002789 +2008_002792 +2008_002795 +2008_002801 +2008_002806 +2008_002809 +2008_002811 +2008_002814 +2008_002817 +2008_002820 +2008_002826 +2008_002829 +2008_002830 +2008_002831 +2008_002835 +2008_002838 +2008_002843 +2008_002845 +2008_002847 +2008_002852 +2008_002859 +2008_002860 +2008_002864 +2008_002869 +2008_002870 +2008_002875 +2008_002876 +2008_002879 +2008_002882 +2008_002883 +2008_002897 +2008_002899 +2008_002900 +2008_002904 +2008_002906 +2008_002908 +2008_002909 +2008_002910 +2008_002920 +2008_002926 +2008_002929 +2008_002932 +2008_002936 +2008_002942 +2008_002946 +2008_002947 +2008_002956 +2008_002958 +2008_002965 +2008_002968 +2008_002971 +2008_002973 +2008_002992 +2008_002999 +2008_003001 +2008_003003 +2008_003005 +2008_003008 +2008_003020 +2008_003022 +2008_003026 +2008_003030 +2008_003034 +2008_003045 +2008_003051 +2008_003052 +2008_003053 +2008_003055 +2008_003056 +2008_003062 +2008_003067 +2008_003072 +2008_003073 +2008_003076 +2008_003082 +2008_003088 +2008_003089 +2008_003090 +2008_003095 +2008_003100 +2008_003104 +2008_003105 +2008_003106 +2008_003107 +2008_003108 +2008_003110 +2008_003132 +2008_003133 +2008_003135 +2008_003136 +2008_003141 +2008_003144 +2008_003152 +2008_003155 +2008_003161 +2008_003167 +2008_003170 +2008_003178 +2008_003181 +2008_003186 +2008_003187 +2008_003189 +2008_003193 +2008_003202 +2008_003205 +2008_003210 +2008_003211 +2008_003220 +2008_003222 +2008_003225 +2008_003228 +2008_003232 +2008_003238 +2008_003239 +2008_003245 +2008_003249 +2008_003255 +2008_003256 +2008_003263 +2008_003270 +2008_003271 +2008_003278 +2008_003280 +2008_003286 +2008_003289 +2008_003291 +2008_003295 +2008_003300 +2008_003305 +2008_003316 +2008_003320 +2008_003326 +2008_003330 +2008_003331 +2008_003333 +2008_003334 +2008_003336 +2008_003343 +2008_003344 +2008_003348 +2008_003350 +2008_003351 +2008_003359 +2008_003361 +2008_003369 +2008_003374 +2008_003379 +2008_003382 +2008_003384 +2008_003395 +2008_003402 +2008_003405 +2008_003407 +2008_003420 +2008_003423 +2008_003424 +2008_003432 +2008_003433 +2008_003439 +2008_003442 +2008_003443 +2008_003449 +2008_003451 +2008_003453 +2008_003461 +2008_003464 +2008_003466 +2008_003467 +2008_003472 +2008_003475 +2008_003476 +2008_003477 +2008_003479 +2008_003482 +2008_003483 +2008_003484 +2008_003492 +2008_003493 +2008_003499 +2008_003511 +2008_003514 +2008_003519 +2008_003524 +2008_003526 +2008_003531 +2008_003542 +2008_003545 +2008_003546 +2008_003547 +2008_003552 +2008_003557 +2008_003565 +2008_003572 +2008_003576 +2008_003577 +2008_003580 +2008_003591 +2008_003592 +2008_003593 +2008_003598 +2008_003604 +2008_003607 +2008_003609 +2008_003610 +2008_003613 +2008_003618 +2008_003619 +2008_003621 +2008_003624 +2008_003636 +2008_003638 +2008_003647 +2008_003650 +2008_003658 +2008_003662 +2008_003671 +2008_003672 +2008_003673 +2008_003676 +2008_003680 +2008_003681 +2008_003683 +2008_003684 +2008_003694 +2008_003704 +2008_003709 +2008_003713 +2008_003718 +2008_003720 +2008_003721 +2008_003722 +2008_003733 +2008_003737 +2008_003743 +2008_003744 +2008_003745 +2008_003749 +2008_003753 +2008_003754 +2008_003755 
+2008_003756 +2008_003763 +2008_003766 +2008_003767 +2008_003768 +2008_003772 +2008_003775 +2008_003777 +2008_003780 +2008_003782 +2008_003789 +2008_003793 +2008_003794 +2008_003799 +2008_003800 +2008_003801 +2008_003805 +2008_003812 +2008_003813 +2008_003820 +2008_003821 +2008_003825 +2008_003826 +2008_003827 +2008_003829 +2008_003830 +2008_003835 +2008_003838 +2008_003840 +2008_003843 +2008_003844 +2008_003846 +2008_003856 +2008_003858 +2008_003860 +2008_003868 +2008_003873 +2008_003874 +2008_003876 +2008_003881 +2008_003884 +2008_003885 +2008_003886 +2008_003894 +2008_003904 +2008_003905 +2008_003915 +2008_003921 +2008_003924 +2008_003926 +2008_003929 +2008_003932 +2008_003933 +2008_003940 +2008_003941 +2008_003943 +2008_003944 +2008_003945 +2008_003948 +2008_003951 +2008_003958 +2008_003962 +2008_003965 +2008_003969 +2008_003971 +2008_003976 +2008_003983 +2008_003988 +2008_003989 +2008_003996 +2008_003997 +2008_004002 +2008_004003 +2008_004006 +2008_004007 +2008_004015 +2008_004016 +2008_004018 +2008_004020 +2008_004027 +2008_004030 +2008_004040 +2008_004045 +2008_004046 +2008_004048 +2008_004054 +2008_004056 +2008_004058 +2008_004064 +2008_004069 +2008_004071 +2008_004075 +2008_004076 +2008_004081 +2008_004088 +2008_004090 +2008_004093 +2008_004101 +2008_004103 +2008_004105 +2008_004119 +2008_004121 +2008_004123 +2008_004124 +2008_004125 +2008_004126 +2008_004127 +2008_004135 +2008_004137 +2008_004140 +2008_004142 +2008_004155 +2008_004166 +2008_004174 +2008_004175 +2008_004178 +2008_004182 +2008_004188 +2008_004189 +2008_004190 +2008_004198 +2008_004203 +2008_004205 +2008_004212 +2008_004213 +2008_004214 +2008_004216 +2008_004221 +2008_004230 +2008_004234 +2008_004242 +2008_004243 +2008_004245 +2008_004251 +2008_004257 +2008_004258 +2008_004263 +2008_004270 +2008_004271 +2008_004273 +2008_004278 +2008_004279 +2008_004289 +2008_004290 +2008_004292 +2008_004297 +2008_004306 +2008_004308 +2008_004312 +2008_004317 +2008_004318 +2008_004324 +2008_004326 +2008_004327 +2008_004330 +2008_004333 +2008_004339 +2008_004344 +2008_004345 +2008_004347 +2008_004348 +2008_004354 +2008_004357 +2008_004361 +2008_004363 +2008_004367 +2008_004371 +2008_004374 +2008_004384 +2008_004389 +2008_004391 +2008_004394 +2008_004396 +2008_004399 +2008_004402 +2008_004406 +2008_004408 +2008_004414 +2008_004417 +2008_004419 +2008_004422 +2008_004425 +2008_004426 +2008_004427 +2008_004431 +2008_004433 +2008_004438 +2008_004445 +2008_004453 +2008_004455 +2008_004459 +2008_004460 +2008_004464 +2008_004469 +2008_004470 +2008_004471 +2008_004476 +2008_004477 +2008_004478 +2008_004479 +2008_004482 +2008_004487 +2008_004497 +2008_004498 +2008_004501 +2008_004502 +2008_004504 +2008_004510 +2008_004520 +2008_004522 +2008_004525 +2008_004526 +2008_004528 +2008_004533 +2008_004534 +2008_004538 +2008_004540 +2008_004541 +2008_004546 +2008_004549 +2008_004550 +2008_004552 +2008_004553 +2008_004554 +2008_004564 +2008_004575 +2008_004579 +2008_004589 +2008_004592 +2008_004599 +2008_004605 +2008_004606 +2008_004610 +2008_004612 +2008_004613 +2008_004614 +2008_004615 +2008_004619 +2008_004621 +2008_004624 +2008_004630 +2008_004632 +2008_004633 +2008_004640 +2008_004646 +2008_004647 +2008_004653 +2008_004654 +2008_004656 +2008_004659 +2008_004662 +2008_004665 +2008_004670 +2008_004684 +2008_004687 +2008_004688 +2008_004689 +2008_004695 +2008_004696 +2008_004701 +2008_004702 +2008_004704 +2008_004705 +2008_004706 +2008_004711 +2008_004716 +2008_004718 +2008_004720 +2008_004722 +2008_004726 +2008_004729 +2008_004730 +2008_004736 
+2008_004740 +2008_004742 +2008_004745 +2008_004754 +2008_004756 +2008_004758 +2008_004760 +2008_004766 +2008_004768 +2008_004771 +2008_004774 +2008_004778 +2008_004784 +2008_004794 +2008_004795 +2008_004797 +2008_004805 +2008_004812 +2008_004814 +2008_004819 +2008_004825 +2008_004832 +2008_004833 +2008_004837 +2008_004851 +2008_004852 +2008_004854 +2008_004862 +2008_004866 +2008_004873 +2008_004875 +2008_004881 +2008_004885 +2008_004887 +2008_004894 +2008_004896 +2008_004898 +2008_004900 +2008_004904 +2008_004907 +2008_004910 +2008_004921 +2008_004923 +2008_004926 +2008_004930 +2008_004933 +2008_004935 +2008_004940 +2008_004942 +2008_004948 +2008_004955 +2008_004967 +2008_004968 +2008_004974 +2008_004975 +2008_004979 +2008_004982 +2008_004984 +2008_004986 +2008_004995 +2008_005001 +2008_005003 +2008_005008 +2008_005010 +2008_005015 +2008_005023 +2008_005032 +2008_005035 +2008_005037 +2008_005043 +2008_005046 +2008_005049 +2008_005054 +2008_005057 +2008_005061 +2008_005063 +2008_005065 +2008_005068 +2008_005070 +2008_005072 +2008_005085 +2008_005089 +2008_005092 +2008_005096 +2008_005097 +2008_005098 +2008_005105 +2008_005107 +2008_005109 +2008_005110 +2008_005111 +2008_005114 +2008_005117 +2008_005123 +2008_005139 +2008_005140 +2008_005147 +2008_005151 +2008_005156 +2008_005160 +2008_005166 +2008_005167 +2008_005175 +2008_005181 +2008_005182 +2008_005183 +2008_005185 +2008_005190 +2008_005191 +2008_005194 +2008_005197 +2008_005204 +2008_005205 +2008_005208 +2008_005215 +2008_005217 +2008_005233 +2008_005235 +2008_005242 +2008_005243 +2008_005244 +2008_005245 +2008_005251 +2008_005252 +2008_005253 +2008_005254 +2008_005255 +2008_005257 +2008_005260 +2008_005261 +2008_005270 +2008_005272 +2008_005276 +2008_005277 +2008_005282 +2008_005288 +2008_005304 +2008_005309 +2008_005313 +2008_005316 +2008_005319 +2008_005323 +2008_005327 +2008_005335 +2008_005337 +2008_005338 +2008_005346 +2008_005347 +2008_005348 +2008_005356 +2008_005359 +2008_005360 +2008_005361 +2008_005369 +2008_005373 +2008_005374 +2008_005378 +2008_005379 +2008_005393 +2008_005398 +2008_005399 +2008_005404 +2008_005406 +2008_005417 +2008_005421 +2008_005422 +2008_005423 +2008_005427 +2008_005431 +2008_005436 +2008_005439 +2008_005444 +2008_005445 +2008_005446 +2008_005447 +2008_005455 +2008_005460 +2008_005467 +2008_005469 +2008_005472 +2008_005480 +2008_005485 +2008_005490 +2008_005498 +2008_005501 +2008_005504 +2008_005510 +2008_005511 +2008_005522 +2008_005525 +2008_005530 +2008_005534 +2008_005538 +2008_005544 +2008_005548 +2008_005550 +2008_005552 +2008_005553 +2008_005563 +2008_005564 +2008_005566 +2008_005573 +2008_005574 +2008_005582 +2008_005588 +2008_005599 +2008_005601 +2008_005608 +2008_005611 +2008_005612 +2008_005614 +2008_005627 +2008_005628 +2008_005631 +2008_005633 +2008_005635 +2008_005637 +2008_005638 +2008_005642 +2008_005643 +2008_005646 +2008_005649 +2008_005652 +2008_005657 +2008_005660 +2008_005663 +2008_005664 +2008_005676 +2008_005677 +2008_005680 +2008_005681 +2008_005685 +2008_005686 +2008_005687 +2008_005691 +2008_005695 +2008_005699 +2008_005701 +2008_005702 +2008_005703 +2008_005720 +2008_005721 +2008_005726 +2008_005727 +2008_005732 +2008_005734 +2008_005735 +2008_005738 +2008_005748 +2008_005750 +2008_005763 +2008_005764 +2008_005768 +2008_005774 +2008_005777 +2008_005779 +2008_005788 +2008_005790 +2008_005792 +2008_005796 +2008_005798 +2008_005801 +2008_005805 +2008_005808 +2008_005812 +2008_005816 +2008_005821 +2008_005825 +2008_005831 +2008_005838 +2008_005846 +2008_005847 +2008_005848 
+2008_005857 +2008_005860 +2008_005863 +2008_005865 +2008_005869 +2008_005875 +2008_005877 +2008_005881 +2008_005883 +2008_005884 +2008_005889 +2008_005895 +2008_005898 +2008_005904 +2008_005907 +2008_005914 +2008_005915 +2008_005918 +2008_005924 +2008_005928 +2008_005933 +2008_005934 +2008_005936 +2008_005939 +2008_005943 +2008_005957 +2008_005962 +2008_005964 +2008_005970 +2008_005975 +2008_005977 +2008_005978 +2008_005980 +2008_005984 +2008_005987 +2008_005989 +2008_006002 +2008_006007 +2008_006008 +2008_006010 +2008_006017 +2008_006021 +2008_006024 +2008_006027 +2008_006028 +2008_006031 +2008_006034 +2008_006036 +2008_006037 +2008_006038 +2008_006041 +2008_006042 +2008_006045 +2008_006047 +2008_006050 +2008_006052 +2008_006055 +2008_006058 +2008_006059 +2008_006063 +2008_006068 +2008_006071 +2008_006072 +2008_006082 +2008_006087 +2008_006088 +2008_006094 +2008_006100 +2008_006104 +2008_006108 +2008_006109 +2008_006112 +2008_006113 +2008_006117 +2008_006120 +2008_006130 +2008_006143 +2008_006144 +2008_006147 +2008_006148 +2008_006151 +2008_006154 +2008_006159 +2008_006163 +2008_006166 +2008_006169 +2008_006175 +2008_006178 +2008_006179 +2008_006185 +2008_006190 +2008_006200 +2008_006203 +2008_006205 +2008_006207 +2008_006216 +2008_006218 +2008_006219 +2008_006222 +2008_006227 +2008_006229 +2008_006233 +2008_006239 +2008_006240 +2008_006254 +2008_006262 +2008_006267 +2008_006269 +2008_006275 +2008_006282 +2008_006285 +2008_006288 +2008_006290 +2008_006298 +2008_006303 +2008_006307 +2008_006310 +2008_006311 +2008_006316 +2008_006320 +2008_006325 +2008_006327 +2008_006330 +2008_006337 +2008_006341 +2008_006347 +2008_006356 +2008_006359 +2008_006362 +2008_006366 +2008_006368 +2008_006377 +2008_006382 +2008_006392 +2008_006394 +2008_006397 +2008_006403 +2008_006407 +2008_006408 +2008_006409 +2008_006416 +2008_006424 +2008_006425 +2008_006429 +2008_006438 +2008_006441 +2008_006449 +2008_006452 +2008_006458 +2008_006463 +2008_006467 +2008_006477 +2008_006480 +2008_006487 +2008_006489 +2008_006502 +2008_006503 +2008_006517 +2008_006519 +2008_006520 +2008_006523 +2008_006524 +2008_006526 +2008_006528 +2008_006530 +2008_006534 +2008_006547 +2008_006548 +2008_006553 +2008_006554 +2008_006568 +2008_006576 +2008_006587 +2008_006588 +2008_006591 +2008_006600 +2008_006604 +2008_006605 +2008_006609 +2008_006611 +2008_006614 +2008_006616 +2008_006617 +2008_006621 +2008_006631 +2008_006635 +2008_006642 +2008_006646 +2008_006656 +2008_006660 +2008_006662 +2008_006665 +2008_006671 +2008_006684 +2008_006686 +2008_006690 +2008_006694 +2008_006696 +2008_006701 +2008_006703 +2008_006708 +2008_006710 +2008_006714 +2008_006716 +2008_006722 +2008_006731 +2008_006732 +2008_006743 +2008_006746 +2008_006747 +2008_006752 +2008_006758 +2008_006765 +2008_006773 +2008_006774 +2008_006776 +2008_006777 +2008_006779 +2008_006781 +2008_006784 +2008_006792 +2008_006793 +2008_006796 +2008_006797 +2008_006798 +2008_006800 +2008_006811 +2008_006813 +2008_006815 +2008_006816 +2008_006817 +2008_006824 +2008_006825 +2008_006828 +2008_006831 +2008_006833 +2008_006835 +2008_006837 +2008_006839 +2008_006841 +2008_006844 +2008_006849 +2008_006855 +2008_006863 +2008_006870 +2008_006874 +2008_006880 +2008_006885 +2008_006887 +2008_006890 +2008_006892 +2008_006896 +2008_006900 +2008_006904 +2008_006907 +2008_006912 +2008_006924 +2008_006925 +2008_006939 +2008_006941 +2008_006944 +2008_006948 +2008_006949 +2008_006951 +2008_006952 +2008_006956 +2008_006959 +2008_006967 +2008_006968 +2008_006979 +2008_006980 +2008_006981 +2008_006986 
+2008_006987 +2008_006989 +2008_006991 +2008_006997 +2008_006998 +2008_006999 +2008_007006 +2008_007010 +2008_007019 +2008_007021 +2008_007025 +2008_007031 +2008_007032 +2008_007034 +2008_007042 +2008_007048 +2008_007050 +2008_007056 +2008_007057 +2008_007059 +2008_007064 +2008_007067 +2008_007070 +2008_007084 +2008_007086 +2008_007091 +2008_007096 +2008_007103 +2008_007105 +2008_007108 +2008_007112 +2008_007114 +2008_007119 +2008_007120 +2008_007123 +2008_007130 +2008_007133 +2008_007134 +2008_007143 +2008_007163 +2008_007164 +2008_007166 +2008_007167 +2008_007171 +2008_007176 +2008_007181 +2008_007182 +2008_007184 +2008_007187 +2008_007188 +2008_007189 +2008_007190 +2008_007194 +2008_007195 +2008_007196 +2008_007207 +2008_007214 +2008_007216 +2008_007219 +2008_007222 +2008_007225 +2008_007227 +2008_007229 +2008_007231 +2008_007241 +2008_007247 +2008_007250 +2008_007256 +2008_007264 +2008_007266 +2008_007269 +2008_007273 +2008_007277 +2008_007279 +2008_007282 +2008_007285 +2008_007287 +2008_007293 +2008_007295 +2008_007305 +2008_007311 +2008_007314 +2008_007317 +2008_007319 +2008_007323 +2008_007324 +2008_007327 +2008_007332 +2008_007334 +2008_007336 +2008_007339 +2008_007344 +2008_007348 +2008_007350 +2008_007352 +2008_007358 +2008_007374 +2008_007378 +2008_007384 +2008_007389 +2008_007390 +2008_007392 +2008_007393 +2008_007398 +2008_007402 +2008_007403 +2008_007404 +2008_007409 +2008_007415 +2008_007417 +2008_007430 +2008_007431 +2008_007434 +2008_007435 +2008_007441 +2008_007446 +2008_007452 +2008_007455 +2008_007458 +2008_007459 +2008_007461 +2008_007466 +2008_007476 +2008_007478 +2008_007480 +2008_007488 +2008_007494 +2008_007497 +2008_007498 +2008_007501 +2008_007507 +2008_007513 +2008_007514 +2008_007521 +2008_007525 +2008_007527 +2008_007529 +2008_007531 +2008_007534 +2008_007536 +2008_007538 +2008_007548 +2008_007558 +2008_007561 +2008_007567 +2008_007574 +2008_007579 +2008_007583 +2008_007585 +2008_007586 +2008_007587 +2008_007591 +2008_007594 +2008_007595 +2008_007596 +2008_007599 +2008_007610 +2008_007612 +2008_007617 +2008_007618 +2008_007623 +2008_007632 +2008_007635 +2008_007643 +2008_007649 +2008_007656 +2008_007661 +2008_007662 +2008_007668 +2008_007669 +2008_007673 +2008_007676 +2008_007677 +2008_007685 +2008_007688 +2008_007690 +2008_007693 +2008_007694 +2008_007702 +2008_007704 +2008_007706 +2008_007714 +2008_007716 +2008_007719 +2008_007729 +2008_007733 +2008_007735 +2008_007736 +2008_007737 +2008_007738 +2008_007739 +2008_007741 +2008_007745 +2008_007749 +2008_007757 +2008_007760 +2008_007764 +2008_007766 +2008_007768 +2008_007787 +2008_007791 +2008_007793 +2008_007794 +2008_007797 +2008_007798 +2008_007804 +2008_007806 +2008_007811 +2008_007814 +2008_007816 +2008_007819 +2008_007823 +2008_007827 +2008_007828 +2008_007831 +2008_007836 +2008_007839 +2008_007841 +2008_007850 +2008_007853 +2008_007854 +2008_007855 +2008_007871 +2008_007872 +2008_007875 +2008_007884 +2008_007887 +2008_007888 +2008_007890 +2008_007893 +2008_007902 +2008_007914 +2008_007915 +2008_007917 +2008_007922 +2008_007923 +2008_007931 +2008_007932 +2008_007933 +2008_007935 +2008_007936 +2008_007942 +2008_007945 +2008_007948 +2008_007949 +2008_007954 +2008_007955 +2008_007964 +2008_007966 +2008_007970 +2008_007981 +2008_007986 +2008_007989 +2008_007993 +2008_007994 +2008_008001 +2008_008011 +2008_008022 +2008_008024 +2008_008025 +2008_008029 +2008_008040 +2008_008044 +2008_008050 +2008_008052 +2008_008053 +2008_008055 +2008_008057 +2008_008066 +2008_008069 +2008_008075 +2008_008084 +2008_008086 
+2008_008091 +2008_008093 +2008_008096 +2008_008103 +2008_008105 +2008_008109 +2008_008113 +2008_008115 +2008_008120 +2008_008123 +2008_008127 +2008_008130 +2008_008131 +2008_008134 +2008_008141 +2008_008145 +2008_008146 +2008_008155 +2008_008175 +2008_008177 +2008_008179 +2008_008185 +2008_008190 +2008_008191 +2008_008192 +2008_008203 +2008_008208 +2008_008210 +2008_008217 +2008_008221 +2008_008224 +2008_008231 +2008_008232 +2008_008233 +2008_008234 +2008_008235 +2008_008241 +2008_008246 +2008_008252 +2008_008254 +2008_008257 +2008_008268 +2008_008269 +2008_008271 +2008_008272 +2008_008278 +2008_008279 +2008_008284 +2008_008292 +2008_008296 +2008_008297 +2008_008301 +2008_008302 +2008_008307 +2008_008310 +2008_008313 +2008_008314 +2008_008318 +2008_008320 +2008_008322 +2008_008331 +2008_008335 +2008_008336 +2008_008337 +2008_008341 +2008_008346 +2008_008354 +2008_008357 +2008_008359 +2008_008362 +2008_008365 +2008_008373 +2008_008376 +2008_008377 +2008_008379 +2008_008380 +2008_008387 +2008_008388 +2008_008392 +2008_008393 +2008_008395 +2008_008406 +2008_008421 +2008_008424 +2008_008429 +2008_008433 +2008_008434 +2008_008435 +2008_008437 +2008_008439 +2008_008443 +2008_008444 +2008_008446 +2008_008450 +2008_008453 +2008_008461 +2008_008466 +2008_008467 +2008_008469 +2008_008470 +2008_008474 +2008_008488 +2008_008500 +2008_008501 +2008_008506 +2008_008512 +2008_008519 +2008_008524 +2008_008527 +2008_008531 +2008_008536 +2008_008537 +2008_008538 +2008_008552 +2008_008554 +2008_008564 +2008_008570 +2008_008574 +2008_008585 +2008_008588 +2008_008595 +2008_008598 +2008_008606 +2008_008611 +2008_008613 +2008_008615 +2008_008617 +2008_008619 +2008_008621 +2008_008622 +2008_008627 +2008_008628 +2008_008629 +2008_008632 +2008_008636 +2008_008652 +2008_008658 +2008_008659 +2008_008662 +2008_008675 +2008_008676 +2008_008679 +2008_008682 +2008_008683 +2008_008684 +2008_008690 +2008_008695 +2008_008700 +2008_008705 +2008_008708 +2008_008711 +2008_008713 +2008_008718 +2008_008724 +2008_008726 +2008_008732 +2008_008739 +2008_008746 +2008_008751 +2008_008753 +2008_008755 +2008_008758 +2008_008765 +2008_008767 +2008_008772 +2009_000001 +2009_000002 +2009_000009 +2009_000011 +2009_000012 +2009_000013 +2009_000017 +2009_000022 +2009_000026 +2009_000032 +2009_000035 +2009_000037 +2009_000039 +2009_000041 +2009_000045 +2009_000051 +2009_000055 +2009_000060 +2009_000063 +2009_000066 +2009_000067 +2009_000068 +2009_000072 +2009_000074 +2009_000078 +2009_000080 +2009_000084 +2009_000087 +2009_000089 +2009_000090 +2009_000093 +2009_000096 +2009_000097 +2009_000102 +2009_000121 +2009_000124 +2009_000136 +2009_000142 +2009_000146 +2009_000149 +2009_000156 +2009_000157 +2009_000158 +2009_000165 +2009_000169 +2009_000171 +2009_000181 +2009_000182 +2009_000183 +2009_000184 +2009_000189 +2009_000192 +2009_000198 +2009_000199 +2009_000201 +2009_000205 +2009_000206 +2009_000212 +2009_000214 +2009_000216 +2009_000219 +2009_000225 +2009_000242 +2009_000244 +2009_000247 +2009_000249 +2009_000254 +2009_000257 +2009_000260 +2009_000276 +2009_000282 +2009_000283 +2009_000284 +2009_000286 +2009_000288 +2009_000291 +2009_000293 +2009_000297 +2009_000298 +2009_000300 +2009_000304 +2009_000305 +2009_000308 +2009_000309 +2009_000312 +2009_000316 +2009_000318 +2009_000321 +2009_000328 +2009_000330 +2009_000335 +2009_000337 +2009_000342 +2009_000351 +2009_000354 +2009_000356 +2009_000366 +2009_000370 +2009_000378 +2009_000387 +2009_000389 +2009_000391 +2009_000397 +2009_000398 +2009_000399 +2009_000402 +2009_000410 +2009_000411 
+2009_000412 +2009_000414 +2009_000417 +2009_000418 +2009_000421 +2009_000422 +2009_000426 +2009_000430 +2009_000435 +2009_000440 +2009_000446 +2009_000453 +2009_000455 +2009_000456 +2009_000457 +2009_000461 +2009_000466 +2009_000469 +2009_000472 +2009_000483 +2009_000487 +2009_000488 +2009_000496 +2009_000499 +2009_000501 +2009_000511 +2009_000512 +2009_000513 +2009_000516 +2009_000519 +2009_000523 +2009_000526 +2009_000536 +2009_000542 +2009_000545 +2009_000549 +2009_000550 +2009_000552 +2009_000558 +2009_000559 +2009_000563 +2009_000566 +2009_000568 +2009_000573 +2009_000574 +2009_000590 +2009_000593 +2009_000597 +2009_000606 +2009_000608 +2009_000611 +2009_000614 +2009_000615 +2009_000619 +2009_000624 +2009_000625 +2009_000628 +2009_000631 +2009_000634 +2009_000637 +2009_000641 +2009_000647 +2009_000648 +2009_000658 +2009_000661 +2009_000664 +2009_000670 +2009_000674 +2009_000675 +2009_000676 +2009_000677 +2009_000681 +2009_000683 +2009_000689 +2009_000691 +2009_000702 +2009_000704 +2009_000705 +2009_000712 +2009_000716 +2009_000719 +2009_000723 +2009_000724 +2009_000725 +2009_000726 +2009_000727 +2009_000730 +2009_000731 +2009_000732 +2009_000734 +2009_000741 +2009_000742 +2009_000756 +2009_000758 +2009_000760 +2009_000762 +2009_000763 +2009_000771 +2009_000778 +2009_000779 +2009_000782 +2009_000783 +2009_000791 +2009_000811 +2009_000812 +2009_000817 +2009_000820 +2009_000821 +2009_000823 +2009_000824 +2009_000825 +2009_000828 +2009_000829 +2009_000830 +2009_000837 +2009_000839 +2009_000840 +2009_000843 +2009_000845 +2009_000846 +2009_000851 +2009_000852 +2009_000856 +2009_000858 +2009_000862 +2009_000865 +2009_000871 +2009_000879 +2009_000886 +2009_000890 +2009_000892 +2009_000896 +2009_000897 +2009_000898 +2009_000901 +2009_000904 +2009_000909 +2009_000919 +2009_000923 +2009_000924 +2009_000925 +2009_000928 +2009_000931 +2009_000934 +2009_000935 +2009_000939 +2009_000948 +2009_000954 +2009_000955 +2009_000958 +2009_000960 +2009_000964 +2009_000966 +2009_000971 +2009_000985 +2009_000989 +2009_000991 +2009_000992 +2009_000995 +2009_000998 +2009_001000 +2009_001006 +2009_001007 +2009_001008 +2009_001011 +2009_001016 +2009_001021 +2009_001024 +2009_001026 +2009_001028 +2009_001030 +2009_001038 +2009_001044 +2009_001054 +2009_001055 +2009_001057 +2009_001061 +2009_001066 +2009_001069 +2009_001075 +2009_001082 +2009_001083 +2009_001084 +2009_001090 +2009_001094 +2009_001097 +2009_001106 +2009_001108 +2009_001113 +2009_001118 +2009_001120 +2009_001121 +2009_001126 +2009_001128 +2009_001134 +2009_001139 +2009_001148 +2009_001155 +2009_001160 +2009_001164 +2009_001166 +2009_001181 +2009_001184 +2009_001194 +2009_001195 +2009_001196 +2009_001198 +2009_001207 +2009_001212 +2009_001215 +2009_001225 +2009_001227 +2009_001240 +2009_001242 +2009_001243 +2009_001245 +2009_001249 +2009_001252 +2009_001255 +2009_001257 +2009_001259 +2009_001266 +2009_001278 +2009_001279 +2009_001286 +2009_001288 +2009_001289 +2009_001299 +2009_001300 +2009_001309 +2009_001313 +2009_001314 +2009_001316 +2009_001320 +2009_001321 +2009_001322 +2009_001326 +2009_001332 +2009_001333 +2009_001343 +2009_001345 +2009_001348 +2009_001349 +2009_001350 +2009_001355 +2009_001361 +2009_001363 +2009_001366 +2009_001367 +2009_001370 +2009_001371 +2009_001384 +2009_001387 +2009_001391 +2009_001393 +2009_001397 +2009_001398 +2009_001406 +2009_001407 +2009_001409 +2009_001411 +2009_001413 +2009_001414 +2009_001417 +2009_001419 +2009_001426 +2009_001427 +2009_001431 +2009_001433 +2009_001437 +2009_001440 +2009_001447 +2009_001449 
+2009_001456 +2009_001468 +2009_001470 +2009_001479 +2009_001484 +2009_001490 +2009_001498 +2009_001501 +2009_001505 +2009_001509 +2009_001518 +2009_001519 +2009_001521 +2009_001522 +2009_001526 +2009_001534 +2009_001535 +2009_001536 +2009_001539 +2009_001549 +2009_001554 +2009_001562 +2009_001565 +2009_001568 +2009_001575 +2009_001577 +2009_001581 +2009_001587 +2009_001591 +2009_001593 +2009_001594 +2009_001606 +2009_001607 +2009_001617 +2009_001618 +2009_001621 +2009_001623 +2009_001627 +2009_001631 +2009_001633 +2009_001635 +2009_001643 +2009_001644 +2009_001645 +2009_001646 +2009_001648 +2009_001653 +2009_001663 +2009_001667 +2009_001673 +2009_001675 +2009_001682 +2009_001683 +2009_001684 +2009_001687 +2009_001696 +2009_001699 +2009_001707 +2009_001709 +2009_001713 +2009_001718 +2009_001723 +2009_001731 +2009_001733 +2009_001738 +2009_001741 +2009_001743 +2009_001752 +2009_001754 +2009_001758 +2009_001759 +2009_001764 +2009_001765 +2009_001767 +2009_001768 +2009_001774 +2009_001775 +2009_001778 +2009_001780 +2009_001784 +2009_001794 +2009_001799 +2009_001804 +2009_001805 +2009_001810 +2009_001811 +2009_001816 +2009_001818 +2009_001820 +2009_001822 +2009_001823 +2009_001830 +2009_001833 +2009_001835 +2009_001839 +2009_001848 +2009_001850 +2009_001851 +2009_001852 +2009_001853 +2009_001854 +2009_001858 +2009_001864 +2009_001881 +2009_001890 +2009_001905 +2009_001906 +2009_001907 +2009_001909 +2009_001911 +2009_001915 +2009_001916 +2009_001929 +2009_001931 +2009_001940 +2009_001941 +2009_001945 +2009_001949 +2009_001965 +2009_001967 +2009_001971 +2009_001973 +2009_001976 +2009_001977 +2009_001979 +2009_001980 +2009_001984 +2009_001988 +2009_001991 +2009_002001 +2009_002002 +2009_002003 +2009_002008 +2009_002009 +2009_002011 +2009_002012 +2009_002024 +2009_002031 +2009_002035 +2009_002039 +2009_002042 +2009_002046 +2009_002047 +2009_002053 +2009_002055 +2009_002056 +2009_002058 +2009_002061 +2009_002073 +2009_002078 +2009_002082 +2009_002087 +2009_002094 +2009_002097 +2009_002110 +2009_002111 +2009_002122 +2009_002127 +2009_002128 +2009_002131 +2009_002136 +2009_002137 +2009_002139 +2009_002141 +2009_002144 +2009_002150 +2009_002155 +2009_002164 +2009_002165 +2009_002169 +2009_002171 +2009_002175 +2009_002177 +2009_002185 +2009_002191 +2009_002194 +2009_002199 +2009_002202 +2009_002205 +2009_002208 +2009_002211 +2009_002212 +2009_002215 +2009_002219 +2009_002221 +2009_002222 +2009_002226 +2009_002228 +2009_002230 +2009_002231 +2009_002232 +2009_002238 +2009_002239 +2009_002242 +2009_002252 +2009_002257 +2009_002265 +2009_002267 +2009_002268 +2009_002272 +2009_002282 +2009_002286 +2009_002291 +2009_002295 +2009_002302 +2009_002305 +2009_002306 +2009_002308 +2009_002317 +2009_002319 +2009_002320 +2009_002325 +2009_002328 +2009_002333 +2009_002335 +2009_002346 +2009_002349 +2009_002350 +2009_002360 +2009_002363 +2009_002366 +2009_002370 +2009_002372 +2009_002373 +2009_002374 +2009_002380 +2009_002382 +2009_002390 +2009_002393 +2009_002398 +2009_002399 +2009_002400 +2009_002401 +2009_002407 +2009_002414 +2009_002415 +2009_002420 +2009_002432 +2009_002433 +2009_002436 +2009_002441 +2009_002444 +2009_002445 +2009_002449 +2009_002453 +2009_002457 +2009_002464 +2009_002465 +2009_002470 +2009_002471 +2009_002474 +2009_002475 +2009_002476 +2009_002477 +2009_002487 +2009_002488 +2009_002499 +2009_002500 +2009_002510 +2009_002512 +2009_002515 +2009_002517 +2009_002518 +2009_002521 +2009_002524 +2009_002525 +2009_002527 +2009_002531 +2009_002532 +2009_002535 +2009_002537 +2009_002539 +2009_002546 
+2009_002549 +2009_002552 +2009_002562 +2009_002563 +2009_002566 +2009_002568 +2009_002569 +2009_002570 +2009_002571 +2009_002573 +2009_002580 +2009_002584 +2009_002591 +2009_002592 +2009_002594 +2009_002604 +2009_002607 +2009_002608 +2009_002609 +2009_002614 +2009_002618 +2009_002632 +2009_002634 +2009_002635 +2009_002638 +2009_002645 +2009_002649 +2009_002651 +2009_002663 +2009_002665 +2009_002667 +2009_002668 +2009_002669 +2009_002670 +2009_002673 +2009_002675 +2009_002680 +2009_002681 +2009_002683 +2009_002684 +2009_002687 +2009_002698 +2009_002708 +2009_002711 +2009_002712 +2009_002717 +2009_002727 +2009_002732 +2009_002733 +2009_002739 +2009_002741 +2009_002743 +2009_002744 +2009_002749 +2009_002752 +2009_002753 +2009_002754 +2009_002755 +2009_002762 +2009_002765 +2009_002771 +2009_002772 +2009_002774 +2009_002777 +2009_002778 +2009_002779 +2009_002785 +2009_002790 +2009_002800 +2009_002803 +2009_002806 +2009_002807 +2009_002808 +2009_002809 +2009_002816 +2009_002830 +2009_002833 +2009_002836 +2009_002837 +2009_002838 +2009_002841 +2009_002856 +2009_002865 +2009_002876 +2009_002877 +2009_002882 +2009_002883 +2009_002887 +2009_002888 +2009_002894 +2009_002898 +2009_002902 +2009_002910 +2009_002918 +2009_002920 +2009_002925 +2009_002928 +2009_002936 +2009_002938 +2009_002940 +2009_002941 +2009_002960 +2009_002962 +2009_002967 +2009_002975 +2009_002977 +2009_002978 +2009_002982 +2009_002985 +2009_002986 +2009_002990 +2009_002995 +2009_002998 +2009_003003 +2009_003005 +2009_003010 +2009_003013 +2009_003018 +2009_003020 +2009_003022 +2009_003023 +2009_003031 +2009_003033 +2009_003043 +2009_003044 +2009_003052 +2009_003058 +2009_003059 +2009_003063 +2009_003065 +2009_003070 +2009_003071 +2009_003074 +2009_003076 +2009_003080 +2009_003083 +2009_003084 +2009_003089 +2009_003097 +2009_003098 +2009_003105 +2009_003110 +2009_003114 +2009_003122 +2009_003123 +2009_003125 +2009_003126 +2009_003128 +2009_003129 +2009_003130 +2009_003132 +2009_003136 +2009_003140 +2009_003143 +2009_003144 +2009_003150 +2009_003151 +2009_003153 +2009_003154 +2009_003181 +2009_003183 +2009_003185 +2009_003189 +2009_003191 +2009_003193 +2009_003194 +2009_003196 +2009_003198 +2009_003199 +2009_003201 +2009_003204 +2009_003212 +2009_003214 +2009_003217 +2009_003224 +2009_003230 +2009_003238 +2009_003241 +2009_003247 +2009_003251 +2009_003254 +2009_003255 +2009_003259 +2009_003262 +2009_003266 +2009_003269 +2009_003271 +2009_003273 +2009_003276 +2009_003278 +2009_003282 +2009_003284 +2009_003288 +2009_003294 +2009_003297 +2009_003299 +2009_003300 +2009_003301 +2009_003304 +2009_003305 +2009_003311 +2009_003312 +2009_003320 +2009_003323 +2009_003343 +2009_003346 +2009_003347 +2009_003348 +2009_003351 +2009_003372 +2009_003373 +2009_003375 +2009_003376 +2009_003378 +2009_003379 +2009_003380 +2009_003387 +2009_003394 +2009_003399 +2009_003400 +2009_003406 +2009_003409 +2009_003411 +2009_003415 +2009_003417 +2009_003422 +2009_003431 +2009_003433 +2009_003440 +2009_003441 +2009_003445 +2009_003450 +2009_003453 +2009_003456 +2009_003457 +2009_003460 +2009_003462 +2009_003466 +2009_003467 +2009_003469 +2009_003476 +2009_003481 +2009_003487 +2009_003491 +2009_003494 +2009_003498 +2009_003499 +2009_003500 +2009_003504 +2009_003507 +2009_003508 +2009_003509 +2009_003517 +2009_003521 +2009_003523 +2009_003528 +2009_003530 +2009_003537 +2009_003538 +2009_003542 +2009_003543 +2009_003544 +2009_003549 +2009_003551 +2009_003554 +2009_003560 +2009_003564 +2009_003565 +2009_003566 +2009_003569 +2009_003571 +2009_003576 +2009_003581 
+2009_003588 +2009_003589 +2009_003592 +2009_003598 +2009_003606 +2009_003607 +2009_003612 +2009_003618 +2009_003626 +2009_003627 +2009_003633 +2009_003635 +2009_003637 +2009_003638 +2009_003640 +2009_003642 +2009_003650 +2009_003655 +2009_003656 +2009_003664 +2009_003666 +2009_003669 +2009_003671 +2009_003679 +2009_003686 +2009_003689 +2009_003696 +2009_003698 +2009_003703 +2009_003704 +2009_003707 +2009_003710 +2009_003713 +2009_003714 +2009_003718 +2009_003725 +2009_003726 +2009_003738 +2009_003747 +2009_003751 +2009_003756 +2009_003758 +2009_003759 +2009_003771 +2009_003773 +2009_003776 +2009_003781 +2009_003785 +2009_003795 +2009_003800 +2009_003802 +2009_003804 +2009_003805 +2009_003806 +2009_003810 +2009_003813 +2009_003814 +2009_003821 +2009_003822 +2009_003832 +2009_003835 +2009_003836 +2009_003840 +2009_003847 +2009_003849 +2009_003855 +2009_003857 +2009_003858 +2009_003863 +2009_003870 +2009_003874 +2009_003879 +2009_003884 +2009_003892 +2009_003895 +2009_003899 +2009_003901 +2009_003902 +2009_003903 +2009_003904 +2009_003905 +2009_003908 +2009_003911 +2009_003914 +2009_003916 +2009_003928 +2009_003929 +2009_003936 +2009_003938 +2009_003944 +2009_003947 +2009_003950 +2009_003951 +2009_003955 +2009_003962 +2009_003965 +2009_003969 +2009_003971 +2009_003973 +2009_003977 +2009_003982 +2009_003986 +2009_003991 +2009_003992 +2009_003995 +2009_004001 +2009_004004 +2009_004016 +2009_004019 +2009_004021 +2009_004022 +2009_004031 +2009_004032 +2009_004033 +2009_004034 +2009_004038 +2009_004040 +2009_004043 +2009_004044 +2009_004050 +2009_004051 +2009_004052 +2009_004062 +2009_004070 +2009_004072 +2009_004075 +2009_004076 +2009_004084 +2009_004085 +2009_004092 +2009_004093 +2009_004099 +2009_004102 +2009_004108 +2009_004111 +2009_004113 +2009_004124 +2009_004125 +2009_004126 +2009_004128 +2009_004129 +2009_004131 +2009_004138 +2009_004140 +2009_004141 +2009_004142 +2009_004148 +2009_004150 +2009_004152 +2009_004157 +2009_004161 +2009_004163 +2009_004164 +2009_004166 +2009_004170 +2009_004175 +2009_004188 +2009_004193 +2009_004197 +2009_004203 +2009_004205 +2009_004207 +2009_004210 +2009_004217 +2009_004221 +2009_004224 +2009_004232 +2009_004233 +2009_004241 +2009_004242 +2009_004243 +2009_004247 +2009_004248 +2009_004255 +2009_004258 +2009_004262 +2009_004263 +2009_004272 +2009_004273 +2009_004274 +2009_004277 +2009_004284 +2009_004291 +2009_004298 +2009_004300 +2009_004303 +2009_004307 +2009_004309 +2009_004324 +2009_004329 +2009_004332 +2009_004346 +2009_004350 +2009_004359 +2009_004361 +2009_004364 +2009_004366 +2009_004377 +2009_004382 +2009_004390 +2009_004402 +2009_004403 +2009_004404 +2009_004410 +2009_004411 +2009_004414 +2009_004419 +2009_004435 +2009_004436 +2009_004440 +2009_004448 +2009_004453 +2009_004455 +2009_004456 +2009_004457 +2009_004468 +2009_004471 +2009_004478 +2009_004483 +2009_004494 +2009_004496 +2009_004497 +2009_004499 +2009_004502 +2009_004504 +2009_004507 +2009_004509 +2009_004518 +2009_004524 +2009_004525 +2009_004529 +2009_004530 +2009_004532 +2009_004535 +2009_004536 +2009_004537 +2009_004540 +2009_004542 +2009_004543 +2009_004548 +2009_004551 +2009_004552 +2009_004556 +2009_004559 +2009_004567 +2009_004568 +2009_004579 +2009_004580 +2009_004581 +2009_004587 +2009_004588 +2009_004590 +2009_004592 +2009_004594 +2009_004601 +2009_004607 +2009_004614 +2009_004623 +2009_004624 +2009_004625 +2009_004629 +2009_004630 +2009_004634 +2009_004635 +2009_004645 +2009_004648 +2009_004653 +2009_004655 +2009_004664 +2009_004669 +2009_004670 +2009_004677 +2009_004679 
+2009_004686 +2009_004687 +2009_004697 +2009_004706 +2009_004713 +2009_004716 +2009_004718 +2009_004720 +2009_004721 +2009_004730 +2009_004732 +2009_004738 +2009_004744 +2009_004746 +2009_004748 +2009_004749 +2009_004754 +2009_004758 +2009_004763 +2009_004765 +2009_004768 +2009_004769 +2009_004780 +2009_004781 +2009_004782 +2009_004784 +2009_004789 +2009_004796 +2009_004799 +2009_004801 +2009_004812 +2009_004820 +2009_004822 +2009_004823 +2009_004828 +2009_004834 +2009_004841 +2009_004845 +2009_004848 +2009_004849 +2009_004856 +2009_004857 +2009_004858 +2009_004859 +2009_004865 +2009_004867 +2009_004868 +2009_004869 +2009_004872 +2009_004876 +2009_004882 +2009_004885 +2009_004886 +2009_004889 +2009_004895 +2009_004897 +2009_004899 +2009_004902 +2009_004913 +2009_004917 +2009_004922 +2009_004929 +2009_004930 +2009_004933 +2009_004934 +2009_004940 +2009_004942 +2009_004946 +2009_004947 +2009_004956 +2009_004961 +2009_004969 +2009_004971 +2009_004974 +2009_004982 +2009_004987 +2009_004988 +2009_004993 +2009_004994 +2009_004996 +2009_005001 +2009_005005 +2009_005008 +2009_005019 +2009_005025 +2009_005033 +2009_005036 +2009_005038 +2009_005060 +2009_005061 +2009_005062 +2009_005064 +2009_005068 +2009_005073 +2009_005078 +2009_005079 +2009_005080 +2009_005082 +2009_005083 +2009_005086 +2009_005087 +2009_005089 +2009_005098 +2009_005103 +2009_005104 +2009_005111 +2009_005114 +2009_005119 +2009_005137 +2009_005140 +2009_005148 +2009_005149 +2009_005150 +2009_005152 +2009_005153 +2009_005156 +2009_005158 +2009_005161 +2009_005165 +2009_005171 +2009_005172 +2009_005178 +2009_005185 +2009_005189 +2009_005190 +2009_005193 +2009_005202 +2009_005203 +2009_005204 +2009_005205 +2009_005210 +2009_005211 +2009_005215 +2009_005216 +2009_005217 +2009_005219 +2009_005220 +2009_005221 +2009_005222 +2009_005225 +2009_005229 +2009_005231 +2009_005232 +2009_005239 +2009_005242 +2009_005257 +2009_005260 +2009_005262 +2009_005267 +2009_005268 +2009_005279 +2009_005286 +2009_005288 +2009_005292 +2009_005294 +2009_005299 +2009_005300 +2009_005302 +2009_005309 +2009_005310 +2010_000001 +2010_000003 +2010_000015 +2010_000020 +2010_000024 +2010_000027 +2010_000033 +2010_000035 +2010_000036 +2010_000038 +2010_000050 +2010_000053 +2010_000054 +2010_000065 +2010_000069 +2010_000072 +2010_000074 +2010_000083 +2010_000084 +2010_000085 +2010_000087 +2010_000088 +2010_000090 +2010_000095 +2010_000097 +2010_000098 +2010_000099 +2010_000110 +2010_000113 +2010_000118 +2010_000127 +2010_000139 +2010_000140 +2010_000145 +2010_000151 +2010_000159 +2010_000160 +2010_000162 +2010_000163 +2010_000170 +2010_000172 +2010_000174 +2010_000175 +2010_000178 +2010_000184 +2010_000193 +2010_000194 +2010_000196 +2010_000197 +2010_000199 +2010_000202 +2010_000211 +2010_000213 +2010_000216 +2010_000218 +2010_000238 +2010_000241 +2010_000246 +2010_000247 +2010_000254 +2010_000256 +2010_000260 +2010_000261 +2010_000262 +2010_000266 +2010_000272 +2010_000273 +2010_000279 +2010_000283 +2010_000284 +2010_000286 +2010_000291 +2010_000295 +2010_000308 +2010_000309 +2010_000312 +2010_000313 +2010_000317 +2010_000318 +2010_000321 +2010_000324 +2010_000325 +2010_000327 +2010_000330 +2010_000335 +2010_000336 +2010_000342 +2010_000344 +2010_000349 +2010_000351 +2010_000352 +2010_000358 +2010_000370 +2010_000372 +2010_000374 +2010_000375 +2010_000376 +2010_000379 +2010_000381 +2010_000382 +2010_000384 +2010_000390 +2010_000399 +2010_000401 +2010_000406 +2010_000409 +2010_000418 +2010_000422 +2010_000426 +2010_000427 +2010_000431 +2010_000433 +2010_000435 
+2010_000442 +2010_000444 +2010_000446 +2010_000449 +2010_000456 +2010_000461 +2010_000462 +2010_000468 +2010_000470 +2010_000474 +2010_000475 +2010_000483 +2010_000485 +2010_000493 +2010_000497 +2010_000502 +2010_000506 +2010_000510 +2010_000515 +2010_000524 +2010_000526 +2010_000530 +2010_000536 +2010_000537 +2010_000541 +2010_000547 +2010_000548 +2010_000552 +2010_000553 +2010_000559 +2010_000562 +2010_000572 +2010_000573 +2010_000574 +2010_000582 +2010_000583 +2010_000586 +2010_000590 +2010_000602 +2010_000603 +2010_000604 +2010_000608 +2010_000617 +2010_000621 +2010_000622 +2010_000624 +2010_000628 +2010_000633 +2010_000635 +2010_000639 +2010_000646 +2010_000647 +2010_000655 +2010_000665 +2010_000666 +2010_000669 +2010_000679 +2010_000682 +2010_000683 +2010_000689 +2010_000690 +2010_000692 +2010_000695 +2010_000697 +2010_000705 +2010_000711 +2010_000712 +2010_000722 +2010_000724 +2010_000726 +2010_000727 +2010_000729 +2010_000731 +2010_000735 +2010_000737 +2010_000738 +2010_000743 +2010_000744 +2010_000749 +2010_000754 +2010_000759 +2010_000761 +2010_000764 +2010_000771 +2010_000778 +2010_000786 +2010_000788 +2010_000791 +2010_000792 +2010_000797 +2010_000802 +2010_000805 +2010_000811 +2010_000814 +2010_000821 +2010_000822 +2010_000828 +2010_000829 +2010_000830 +2010_000831 +2010_000836 +2010_000846 +2010_000865 +2010_000866 +2010_000870 +2010_000874 +2010_000875 +2010_000876 +2010_000883 +2010_000889 +2010_000893 +2010_000897 +2010_000898 +2010_000904 +2010_000906 +2010_000907 +2010_000915 +2010_000918 +2010_000923 +2010_000927 +2010_000928 +2010_000929 +2010_000931 +2010_000941 +2010_000944 +2010_000945 +2010_000947 +2010_000948 +2010_000952 +2010_000955 +2010_000956 +2010_000959 +2010_000961 +2010_000968 +2010_000973 +2010_000975 +2010_000981 +2010_000989 +2010_000993 +2010_000996 +2010_001000 +2010_001006 +2010_001008 +2010_001009 +2010_001010 +2010_001011 +2010_001016 +2010_001017 +2010_001021 +2010_001024 +2010_001030 +2010_001032 +2010_001036 +2010_001042 +2010_001049 +2010_001051 +2010_001052 +2010_001057 +2010_001061 +2010_001069 +2010_001070 +2010_001077 +2010_001079 +2010_001080 +2010_001082 +2010_001085 +2010_001089 +2010_001099 +2010_001104 +2010_001107 +2010_001109 +2010_001117 +2010_001119 +2010_001124 +2010_001125 +2010_001127 +2010_001130 +2010_001147 +2010_001149 +2010_001151 +2010_001158 +2010_001163 +2010_001164 +2010_001172 +2010_001174 +2010_001181 +2010_001188 +2010_001189 +2010_001192 +2010_001201 +2010_001204 +2010_001206 +2010_001214 +2010_001215 +2010_001216 +2010_001218 +2010_001219 +2010_001220 +2010_001229 +2010_001234 +2010_001241 +2010_001242 +2010_001246 +2010_001251 +2010_001256 +2010_001257 +2010_001263 +2010_001264 +2010_001270 +2010_001272 +2010_001286 +2010_001287 +2010_001291 +2010_001292 +2010_001293 +2010_001294 +2010_001301 +2010_001305 +2010_001313 +2010_001315 +2010_001321 +2010_001325 +2010_001326 +2010_001327 +2010_001331 +2010_001333 +2010_001343 +2010_001351 +2010_001355 +2010_001357 +2010_001364 +2010_001367 +2010_001376 +2010_001382 +2010_001394 +2010_001403 +2010_001405 +2010_001407 +2010_001411 +2010_001412 +2010_001417 +2010_001421 +2010_001426 +2010_001432 +2010_001439 +2010_001441 +2010_001448 +2010_001449 +2010_001451 +2010_001452 +2010_001453 +2010_001455 +2010_001461 +2010_001463 +2010_001468 +2010_001473 +2010_001479 +2010_001486 +2010_001497 +2010_001501 +2010_001502 +2010_001505 +2010_001516 +2010_001518 +2010_001520 +2010_001522 +2010_001525 +2010_001528 +2010_001534 +2010_001535 +2010_001536 +2010_001539 +2010_001540 
+2010_001543 +2010_001544 +2010_001548 +2010_001553 +2010_001557 +2010_001563 +2010_001571 +2010_001574 +2010_001577 +2010_001579 +2010_001584 +2010_001586 +2010_001587 +2010_001601 +2010_001606 +2010_001614 +2010_001625 +2010_001633 +2010_001635 +2010_001636 +2010_001637 +2010_001640 +2010_001645 +2010_001646 +2010_001652 +2010_001656 +2010_001659 +2010_001668 +2010_001669 +2010_001671 +2010_001675 +2010_001679 +2010_001680 +2010_001682 +2010_001685 +2010_001690 +2010_001691 +2010_001692 +2010_001697 +2010_001699 +2010_001705 +2010_001712 +2010_001717 +2010_001720 +2010_001731 +2010_001734 +2010_001737 +2010_001739 +2010_001749 +2010_001752 +2010_001754 +2010_001757 +2010_001760 +2010_001763 +2010_001767 +2010_001768 +2010_001771 +2010_001773 +2010_001777 +2010_001783 +2010_001787 +2010_001788 +2010_001796 +2010_001803 +2010_001814 +2010_001819 +2010_001820 +2010_001821 +2010_001823 +2010_001827 +2010_001828 +2010_001829 +2010_001830 +2010_001836 +2010_001837 +2010_001838 +2010_001843 +2010_001845 +2010_001851 +2010_001857 +2010_001862 +2010_001863 +2010_001868 +2010_001869 +2010_001877 +2010_001891 +2010_001892 +2010_001893 +2010_001904 +2010_001907 +2010_001908 +2010_001913 +2010_001916 +2010_001918 +2010_001921 +2010_001927 +2010_001929 +2010_001937 +2010_001938 +2010_001950 +2010_001951 +2010_001954 +2010_001956 +2010_001962 +2010_001966 +2010_001967 +2010_001968 +2010_001986 +2010_001987 +2010_001988 +2010_001992 +2010_001995 +2010_001998 +2010_002000 +2010_002002 +2010_002005 +2010_002006 +2010_002017 +2010_002019 +2010_002022 +2010_002025 +2010_002029 +2010_002030 +2010_002040 +2010_002041 +2010_002046 +2010_002048 +2010_002050 +2010_002058 +2010_002060 +2010_002067 +2010_002073 +2010_002085 +2010_002086 +2010_002089 +2010_002094 +2010_002096 +2010_002098 +2010_002100 +2010_002102 +2010_002105 +2010_002106 +2010_002113 +2010_002117 +2010_002124 +2010_002128 +2010_002133 +2010_002137 +2010_002138 +2010_002142 +2010_002146 +2010_002147 +2010_002150 +2010_002161 +2010_002167 +2010_002172 +2010_002175 +2010_002181 +2010_002182 +2010_002183 +2010_002187 +2010_002192 +2010_002194 +2010_002195 +2010_002199 +2010_002200 +2010_002211 +2010_002213 +2010_002219 +2010_002223 +2010_002224 +2010_002228 +2010_002229 +2010_002232 +2010_002244 +2010_002245 +2010_002247 +2010_002251 +2010_002255 +2010_002261 +2010_002269 +2010_002271 +2010_002276 +2010_002279 +2010_002283 +2010_002287 +2010_002289 +2010_002294 +2010_002303 +2010_002305 +2010_002307 +2010_002310 +2010_002313 +2010_002315 +2010_002316 +2010_002319 +2010_002321 +2010_002326 +2010_002332 +2010_002336 +2010_002337 +2010_002340 +2010_002348 +2010_002354 +2010_002357 +2010_002361 +2010_002365 +2010_002366 +2010_002370 +2010_002372 +2010_002373 +2010_002383 +2010_002388 +2010_002390 +2010_002396 +2010_002398 +2010_002402 +2010_002405 +2010_002406 +2010_002408 +2010_002409 +2010_002420 +2010_002422 +2010_002427 +2010_002436 +2010_002446 +2010_002448 +2010_002449 +2010_002450 +2010_002458 +2010_002460 +2010_002461 +2010_002468 +2010_002479 +2010_002480 +2010_002482 +2010_002484 +2010_002504 +2010_002510 +2010_002512 +2010_002516 +2010_002518 +2010_002526 +2010_002531 +2010_002533 +2010_002534 +2010_002536 +2010_002538 +2010_002539 +2010_002542 +2010_002543 +2010_002546 +2010_002547 +2010_002561 +2010_002565 +2010_002569 +2010_002578 +2010_002579 +2010_002580 +2010_002586 +2010_002587 +2010_002597 +2010_002598 +2010_002601 +2010_002602 +2010_002603 +2010_002605 +2010_002621 +2010_002623 +2010_002629 +2010_002631 +2010_002632 +2010_002638 
+2010_002639 +2010_002645 +2010_002652 +2010_002654 +2010_002660 +2010_002661 +2010_002666 +2010_002667 +2010_002668 +2010_002676 +2010_002678 +2010_002679 +2010_002682 +2010_002691 +2010_002693 +2010_002695 +2010_002701 +2010_002704 +2010_002705 +2010_002710 +2010_002713 +2010_002714 +2010_002716 +2010_002721 +2010_002723 +2010_002725 +2010_002728 +2010_002736 +2010_002737 +2010_002740 +2010_002741 +2010_002754 +2010_002758 +2010_002763 +2010_002767 +2010_002770 +2010_002771 +2010_002774 +2010_002775 +2010_002780 +2010_002783 +2010_002789 +2010_002790 +2010_002791 +2010_002792 +2010_002793 +2010_002803 +2010_002807 +2010_002808 +2010_002814 +2010_002817 +2010_002822 +2010_002824 +2010_002827 +2010_002840 +2010_002845 +2010_002853 +2010_002854 +2010_002858 +2010_002860 +2010_002864 +2010_002868 +2010_002871 +2010_002873 +2010_002876 +2010_002877 +2010_002879 +2010_002881 +2010_002887 +2010_002900 +2010_002902 +2010_002905 +2010_002914 +2010_002921 +2010_002924 +2010_002927 +2010_002929 +2010_002930 +2010_002939 +2010_002940 +2010_002954 +2010_002956 +2010_002958 +2010_002960 +2010_002963 +2010_002965 +2010_002972 +2010_002980 +2010_002985 +2010_002988 +2010_002991 +2010_002993 +2010_002995 +2010_003014 +2010_003015 +2010_003016 +2010_003019 +2010_003024 +2010_003040 +2010_003043 +2010_003051 +2010_003054 +2010_003060 +2010_003067 +2010_003071 +2010_003072 +2010_003074 +2010_003081 +2010_003082 +2010_003091 +2010_003092 +2010_003098 +2010_003102 +2010_003103 +2010_003107 +2010_003112 +2010_003120 +2010_003122 +2010_003123 +2010_003127 +2010_003129 +2010_003132 +2010_003133 +2010_003135 +2010_003139 +2010_003146 +2010_003147 +2010_003154 +2010_003156 +2010_003160 +2010_003168 +2010_003176 +2010_003183 +2010_003187 +2010_003190 +2010_003199 +2010_003200 +2010_003201 +2010_003207 +2010_003212 +2010_003214 +2010_003219 +2010_003220 +2010_003223 +2010_003231 +2010_003232 +2010_003233 +2010_003236 +2010_003239 +2010_003240 +2010_003244 +2010_003248 +2010_003249 +2010_003251 +2010_003253 +2010_003257 +2010_003260 +2010_003270 +2010_003275 +2010_003276 +2010_003278 +2010_003279 +2010_003285 +2010_003287 +2010_003293 +2010_003299 +2010_003302 +2010_003303 +2010_003314 +2010_003316 +2010_003321 +2010_003325 +2010_003326 +2010_003331 +2010_003335 +2010_003341 +2010_003358 +2010_003361 +2010_003362 +2010_003365 +2010_003366 +2010_003368 +2010_003375 +2010_003376 +2010_003379 +2010_003381 +2010_003385 +2010_003390 +2010_003397 +2010_003398 +2010_003401 +2010_003402 +2010_003409 +2010_003411 +2010_003418 +2010_003419 +2010_003427 +2010_003429 +2010_003446 +2010_003450 +2010_003451 +2010_003453 +2010_003458 +2010_003461 +2010_003465 +2010_003467 +2010_003468 +2010_003470 +2010_003473 +2010_003479 +2010_003482 +2010_003488 +2010_003490 +2010_003493 +2010_003495 +2010_003496 +2010_003497 +2010_003503 +2010_003506 +2010_003508 +2010_003514 +2010_003520 +2010_003522 +2010_003527 +2010_003531 +2010_003532 +2010_003537 +2010_003540 +2010_003541 +2010_003547 +2010_003559 +2010_003561 +2010_003562 +2010_003563 +2010_003568 +2010_003569 +2010_003573 +2010_003579 +2010_003585 +2010_003588 +2010_003594 +2010_003597 +2010_003603 +2010_003605 +2010_003609 +2010_003610 +2010_003613 +2010_003628 +2010_003630 +2010_003632 +2010_003640 +2010_003641 +2010_003645 +2010_003653 +2010_003655 +2010_003659 +2010_003664 +2010_003667 +2010_003673 +2010_003675 +2010_003679 +2010_003687 +2010_003688 +2010_003695 +2010_003701 +2010_003708 +2010_003709 +2010_003716 +2010_003723 +2010_003724 +2010_003728 +2010_003729 +2010_003730 
+2010_003731 +2010_003735 +2010_003742 +2010_003744 +2010_003745 +2010_003746 +2010_003755 +2010_003757 +2010_003758 +2010_003761 +2010_003762 +2010_003764 +2010_003768 +2010_003771 +2010_003772 +2010_003774 +2010_003779 +2010_003781 +2010_003792 +2010_003800 +2010_003801 +2010_003805 +2010_003806 +2010_003807 +2010_003811 +2010_003813 +2010_003820 +2010_003823 +2010_003826 +2010_003828 +2010_003847 +2010_003848 +2010_003852 +2010_003854 +2010_003855 +2010_003857 +2010_003859 +2010_003861 +2010_003863 +2010_003878 +2010_003879 +2010_003890 +2010_003898 +2010_003912 +2010_003915 +2010_003919 +2010_003920 +2010_003928 +2010_003933 +2010_003936 +2010_003939 +2010_003942 +2010_003943 +2010_003944 +2010_003947 +2010_003955 +2010_003956 +2010_003961 +2010_003966 +2010_003970 +2010_003971 +2010_003976 +2010_003980 +2010_003981 +2010_003983 +2010_003988 +2010_003999 +2010_004006 +2010_004010 +2010_004021 +2010_004023 +2010_004026 +2010_004027 +2010_004031 +2010_004036 +2010_004037 +2010_004041 +2010_004042 +2010_004050 +2010_004054 +2010_004056 +2010_004063 +2010_004064 +2010_004067 +2010_004073 +2010_004088 +2010_004094 +2010_004095 +2010_004096 +2010_004102 +2010_004104 +2010_004105 +2010_004107 +2010_004120 +2010_004124 +2010_004125 +2010_004129 +2010_004137 +2010_004139 +2010_004140 +2010_004141 +2010_004143 +2010_004145 +2010_004149 +2010_004157 +2010_004161 +2010_004165 +2010_004173 +2010_004178 +2010_004179 +2010_004182 +2010_004184 +2010_004187 +2010_004188 +2010_004193 +2010_004201 +2010_004207 +2010_004208 +2010_004209 +2010_004211 +2010_004219 +2010_004224 +2010_004225 +2010_004226 +2010_004227 +2010_004228 +2010_004229 +2010_004230 +2010_004238 +2010_004253 +2010_004254 +2010_004257 +2010_004263 +2010_004278 +2010_004279 +2010_004280 +2010_004286 +2010_004290 +2010_004291 +2010_004297 +2010_004304 +2010_004312 +2010_004313 +2010_004314 +2010_004318 +2010_004320 +2010_004322 +2010_004335 +2010_004337 +2010_004339 +2010_004341 +2010_004345 +2010_004348 +2010_004350 +2010_004351 +2010_004352 +2010_004355 +2010_004362 +2010_004369 +2010_004374 +2010_004380 +2010_004382 +2010_004387 +2010_004390 +2010_004391 +2010_004400 +2010_004404 +2010_004409 +2010_004415 +2010_004417 +2010_004419 +2010_004420 +2010_004422 +2010_004425 +2010_004428 +2010_004431 +2010_004432 +2010_004439 +2010_004447 +2010_004455 +2010_004456 +2010_004457 +2010_004460 +2010_004461 +2010_004469 +2010_004472 +2010_004475 +2010_004479 +2010_004483 +2010_004484 +2010_004486 +2010_004488 +2010_004503 +2010_004505 +2010_004506 +2010_004509 +2010_004515 +2010_004519 +2010_004520 +2010_004529 +2010_004533 +2010_004536 +2010_004537 +2010_004542 +2010_004543 +2010_004545 +2010_004550 +2010_004551 +2010_004553 +2010_004554 +2010_004556 +2010_004557 +2010_004559 +2010_004567 +2010_004570 +2010_004584 +2010_004585 +2010_004586 +2010_004588 +2010_004596 +2010_004597 +2010_004608 +2010_004618 +2010_004624 +2010_004627 +2010_004628 +2010_004629 +2010_004634 +2010_004635 +2010_004637 +2010_004642 +2010_004654 +2010_004659 +2010_004661 +2010_004662 +2010_004667 +2010_004670 +2010_004672 +2010_004677 +2010_004679 +2010_004681 +2010_004686 +2010_004691 +2010_004692 +2010_004697 +2010_004714 +2010_004722 +2010_004733 +2010_004735 +2010_004743 +2010_004747 +2010_004748 +2010_004750 +2010_004753 +2010_004756 +2010_004757 +2010_004763 +2010_004768 +2010_004772 +2010_004775 +2010_004778 +2010_004779 +2010_004783 +2010_004785 +2010_004786 +2010_004789 +2010_004792 +2010_004795 +2010_004804 +2010_004809 +2010_004813 +2010_004815 +2010_004817 
+2010_004821 +2010_004825 +2010_004828 +2010_004829 +2010_004830 +2010_004836 +2010_004849 +2010_004854 +2010_004856 +2010_004857 +2010_004861 +2010_004865 +2010_004866 +2010_004868 +2010_004877 +2010_004889 +2010_004891 +2010_004894 +2010_004901 +2010_004903 +2010_004906 +2010_004908 +2010_004909 +2010_004917 +2010_004919 +2010_004921 +2010_004930 +2010_004931 +2010_004941 +2010_004946 +2010_004951 +2010_004952 +2010_004954 +2010_004957 +2010_004967 +2010_004980 +2010_004982 +2010_004989 +2010_004992 +2010_004994 +2010_004998 +2010_005000 +2010_005005 +2010_005006 +2010_005008 +2010_005013 +2010_005021 +2010_005023 +2010_005026 +2010_005031 +2010_005035 +2010_005042 +2010_005044 +2010_005046 +2010_005048 +2010_005049 +2010_005052 +2010_005053 +2010_005059 +2010_005061 +2010_005063 +2010_005066 +2010_005075 +2010_005079 +2010_005082 +2010_005083 +2010_005087 +2010_005096 +2010_005107 +2010_005108 +2010_005109 +2010_005115 +2010_005116 +2010_005118 +2010_005120 +2010_005123 +2010_005130 +2010_005136 +2010_005138 +2010_005141 +2010_005143 +2010_005148 +2010_005152 +2010_005158 +2010_005159 +2010_005160 +2010_005164 +2010_005166 +2010_005167 +2010_005169 +2010_005174 +2010_005180 +2010_005184 +2010_005185 +2010_005187 +2010_005188 +2010_005192 +2010_005206 +2010_005208 +2010_005215 +2010_005222 +2010_005224 +2010_005226 +2010_005230 +2010_005239 +2010_005242 +2010_005243 +2010_005245 +2010_005246 +2010_005250 +2010_005252 +2010_005264 +2010_005268 +2010_005272 +2010_005284 +2010_005285 +2010_005287 +2010_005292 +2010_005293 +2010_005305 +2010_005314 +2010_005323 +2010_005327 +2010_005330 +2010_005331 +2010_005332 +2010_005338 +2010_005340 +2010_005344 +2010_005345 +2010_005346 +2010_005353 +2010_005366 +2010_005369 +2010_005372 +2010_005374 +2010_005375 +2010_005379 +2010_005382 +2010_005394 +2010_005398 +2010_005401 +2010_005405 +2010_005406 +2010_005410 +2010_005414 +2010_005416 +2010_005421 +2010_005424 +2010_005425 +2010_005428 +2010_005432 +2010_005433 +2010_005441 +2010_005448 +2010_005452 +2010_005455 +2010_005456 +2010_005463 +2010_005467 +2010_005472 +2010_005474 +2010_005480 +2010_005482 +2010_005483 +2010_005484 +2010_005491 +2010_005493 +2010_005496 +2010_005501 +2010_005502 +2010_005508 +2010_005514 +2010_005515 +2010_005516 +2010_005527 +2010_005531 +2010_005532 +2010_005534 +2010_005538 +2010_005542 +2010_005543 +2010_005548 +2010_005551 +2010_005556 +2010_005562 +2010_005566 +2010_005567 +2010_005572 +2010_005575 +2010_005576 +2010_005582 +2010_005586 +2010_005587 +2010_005592 +2010_005594 +2010_005606 +2010_005610 +2010_005612 +2010_005620 +2010_005625 +2010_005626 +2010_005632 +2010_005635 +2010_005636 +2010_005637 +2010_005644 +2010_005647 +2010_005651 +2010_005654 +2010_005657 +2010_005658 +2010_005664 +2010_005666 +2010_005671 +2010_005676 +2010_005681 +2010_005688 +2010_005692 +2010_005697 +2010_005705 +2010_005706 +2010_005709 +2010_005712 +2010_005718 +2010_005719 +2010_005727 +2010_005731 +2010_005733 +2010_005738 +2010_005747 +2010_005752 +2010_005754 +2010_005756 +2010_005761 +2010_005762 +2010_005763 +2010_005764 +2010_005767 +2010_005768 +2010_005777 +2010_005780 +2010_005784 +2010_005788 +2010_005804 +2010_005806 +2010_005815 +2010_005817 +2010_005824 +2010_005827 +2010_005833 +2010_005837 +2010_005838 +2010_005843 +2010_005848 +2010_005849 +2010_005853 +2010_005860 +2010_005868 +2010_005870 +2010_005871 +2010_005877 +2010_005882 +2010_005883 +2010_005884 +2010_005885 +2010_005886 +2010_005888 +2010_005894 +2010_005896 +2010_005897 +2010_005899 +2010_005901 
+2010_005903 +2010_005907 +2010_005914 +2010_005922 +2010_005934 +2010_005936 +2010_005937 +2010_005938 +2010_005943 +2010_005953 +2010_005973 +2010_005976 +2010_005980 +2010_005981 +2010_005991 +2010_005992 +2010_005993 +2010_005997 +2010_005998 +2010_006000 +2010_006003 +2010_006004 +2010_006010 +2010_006011 +2010_006021 +2010_006025 +2010_006026 +2010_006031 +2010_006032 +2010_006033 +2010_006034 +2010_006035 +2010_006037 +2010_006041 +2010_006051 +2010_006054 +2010_006056 +2010_006057 +2010_006058 +2010_006061 +2010_006062 +2010_006070 +2010_006076 +2010_006082 +2010_006084 +2010_006086 +2011_000002 +2011_000007 +2011_000009 +2011_000010 +2011_000016 +2011_000034 +2011_000036 +2011_000037 +2011_000038 +2011_000043 +2011_000045 +2011_000051 +2011_000054 +2011_000057 +2011_000060 +2011_000061 +2011_000065 +2011_000066 +2011_000070 +2011_000071 +2011_000076 +2011_000077 +2011_000082 +2011_000083 +2011_000084 +2011_000086 +2011_000087 +2011_000090 +2011_000094 +2011_000096 +2011_000098 +2011_000102 +2011_000103 +2011_000109 +2011_000112 +2011_000114 +2011_000124 +2011_000128 +2011_000129 +2011_000130 +2011_000142 +2011_000146 +2011_000147 +2011_000161 +2011_000162 +2011_000163 +2011_000165 +2011_000166 +2011_000173 +2011_000178 +2011_000180 +2011_000185 +2011_000194 +2011_000195 +2011_000202 +2011_000206 +2011_000210 +2011_000213 +2011_000214 +2011_000226 +2011_000229 +2011_000232 +2011_000234 +2011_000238 +2011_000239 +2011_000246 +2011_000248 +2011_000253 +2011_000257 +2011_000273 +2011_000276 +2011_000283 +2011_000288 +2011_000291 +2011_000299 +2011_000304 +2011_000307 +2011_000309 +2011_000310 +2011_000312 +2011_000314 +2011_000315 +2011_000319 +2011_000320 +2011_000321 +2011_000322 +2011_000332 +2011_000338 +2011_000344 +2011_000346 +2011_000364 +2011_000369 +2011_000374 +2011_000386 +2011_000391 +2011_000396 +2011_000404 +2011_000408 +2011_000412 +2011_000418 +2011_000419 +2011_000426 +2011_000427 +2011_000432 +2011_000435 +2011_000436 +2011_000438 +2011_000445 +2011_000455 +2011_000456 +2011_000471 +2011_000474 +2011_000477 +2011_000479 +2011_000481 +2011_000482 +2011_000487 +2011_000498 +2011_000503 +2011_000511 +2011_000512 +2011_000514 +2011_000518 +2011_000519 +2011_000521 +2011_000526 +2011_000530 +2011_000532 +2011_000536 +2011_000541 +2011_000548 +2011_000554 +2011_000557 +2011_000559 +2011_000566 +2011_000569 +2011_000575 +2011_000585 +2011_000592 +2011_000598 +2011_000600 +2011_000607 +2011_000608 +2011_000609 +2011_000612 +2011_000618 +2011_000622 +2011_000627 +2011_000630 +2011_000634 +2011_000638 +2011_000656 +2011_000658 +2011_000661 +2011_000666 +2011_000669 +2011_000679 +2011_000683 +2011_000685 +2011_000688 +2011_000690 +2011_000709 +2011_000718 +2011_000724 +2011_000734 +2011_000743 +2011_000744 +2011_000745 +2011_000747 +2011_000749 +2011_000753 +2011_000765 +2011_000767 +2011_000770 +2011_000772 +2011_000774 +2011_000778 +2011_000780 +2011_000784 +2011_000785 +2011_000789 +2011_000807 +2011_000809 +2011_000813 +2011_000824 +2011_000830 +2011_000843 +2011_000850 +2011_000851 +2011_000853 +2011_000872 +2011_000874 +2011_000887 +2011_000888 +2011_000897 +2011_000900 +2011_000901 +2011_000908 +2011_000909 +2011_000912 +2011_000917 +2011_000919 +2011_000927 +2011_000930 +2011_000932 +2011_000933 +2011_000950 +2011_000951 +2011_000953 +2011_000957 +2011_000961 +2011_000965 +2011_000969 +2011_000977 +2011_000986 +2011_000990 +2011_001005 +2011_001008 +2011_001014 +2011_001019 +2011_001020 +2011_001025 +2011_001029 +2011_001032 +2011_001036 +2011_001040 +2011_001044 
+2011_001047 +2011_001054 +2011_001056 +2011_001058 +2011_001060 +2011_001064 +2011_001069 +2011_001071 +2011_001081 +2011_001082 +2011_001084 +2011_001086 +2011_001100 +2011_001105 +2011_001106 +2011_001110 +2011_001111 +2011_001114 +2011_001116 +2011_001124 +2011_001126 +2011_001128 +2011_001137 +2011_001138 +2011_001146 +2011_001149 +2011_001152 +2011_001158 +2011_001159 +2011_001160 +2011_001161 +2011_001167 +2011_001190 +2011_001201 +2011_001203 +2011_001213 +2011_001217 +2011_001221 +2011_001223 +2011_001226 +2011_001229 +2011_001232 +2011_001245 +2011_001251 +2011_001252 +2011_001260 +2011_001261 +2011_001263 +2011_001264 +2011_001266 +2011_001271 +2011_001276 +2011_001281 +2011_001282 +2011_001283 +2011_001284 +2011_001287 +2011_001288 +2011_001290 +2011_001292 +2011_001295 +2011_001304 +2011_001305 +2011_001311 +2011_001313 +2011_001315 +2011_001319 +2011_001326 +2011_001327 +2011_001329 +2011_001330 +2011_001335 +2011_001337 +2011_001341 +2011_001346 +2011_001350 +2011_001355 +2011_001360 +2011_001366 +2011_001370 +2011_001375 +2011_001387 +2011_001388 +2011_001389 +2011_001390 +2011_001399 +2011_001404 +2011_001406 +2011_001407 +2011_001416 +2011_001421 +2011_001434 +2011_001440 +2011_001441 +2011_001447 +2011_001467 +2011_001471 +2011_001489 +2011_001501 +2011_001507 +2011_001508 +2011_001518 +2011_001521 +2011_001524 +2011_001525 +2011_001529 +2011_001530 +2011_001531 +2011_001534 +2011_001535 +2011_001541 +2011_001543 +2011_001544 +2011_001546 +2011_001558 +2011_001567 +2011_001568 +2011_001573 +2011_001589 +2011_001591 +2011_001592 +2011_001596 +2011_001597 +2011_001601 +2011_001607 +2011_001608 +2011_001612 +2011_001613 +2011_001614 +2011_001618 +2011_001619 +2011_001620 +2011_001624 +2011_001628 +2011_001641 +2011_001642 +2011_001655 +2011_001665 +2011_001669 +2011_001674 +2011_001678 +2011_001691 +2011_001693 +2011_001699 +2011_001705 +2011_001707 +2011_001708 +2011_001712 +2011_001713 +2011_001714 +2011_001719 +2011_001720 +2011_001722 +2011_001726 +2011_001741 +2011_001745 +2011_001747 +2011_001748 +2011_001751 +2011_001757 +2011_001770 +2011_001771 +2011_001775 +2011_001782 +2011_001785 +2011_001793 +2011_001794 +2011_001800 +2011_001801 +2011_001806 +2011_001812 +2011_001815 +2011_001819 +2011_001820 +2011_001822 +2011_001824 +2011_001825 +2011_001827 +2011_001834 +2011_001837 +2011_001841 +2011_001842 +2011_001845 +2011_001847 +2011_001854 +2011_001856 +2011_001858 +2011_001862 +2011_001863 +2011_001868 +2011_001870 +2011_001873 +2011_001876 +2011_001877 +2011_001880 +2011_001900 +2011_001910 +2011_001911 +2011_001914 +2011_001919 +2011_001927 +2011_001932 +2011_001941 +2011_001942 +2011_001945 +2011_001946 +2011_001951 +2011_001962 +2011_001966 +2011_001975 +2011_001980 +2011_001982 +2011_001984 +2011_001986 +2011_001988 +2011_001989 +2011_002002 +2011_002003 +2011_002004 +2011_002016 +2011_002018 +2011_002019 +2011_002021 +2011_002033 +2011_002036 +2011_002038 +2011_002040 +2011_002041 +2011_002042 +2011_002044 +2011_002045 +2011_002047 +2011_002064 +2011_002074 +2011_002075 +2011_002079 +2011_002088 +2011_002091 +2011_002093 +2011_002098 +2011_002100 +2011_002102 +2011_002105 +2011_002108 +2011_002109 +2011_002110 +2011_002116 +2011_002121 +2011_002124 +2011_002128 +2011_002132 +2011_002137 +2011_002150 +2011_002154 +2011_002156 +2011_002158 +2011_002159 +2011_002160 +2011_002163 +2011_002169 +2011_002173 +2011_002174 +2011_002178 +2011_002184 +2011_002185 +2011_002192 +2011_002193 +2011_002200 +2011_002215 +2011_002221 +2011_002223 +2011_002230 +2011_002234 
+2011_002241 +2011_002244 +2011_002247 +2011_002248 +2011_002260 +2011_002269 +2011_002270 +2011_002272 +2011_002276 +2011_002279 +2011_002280 +2011_002292 +2011_002294 +2011_002295 +2011_002298 +2011_002301 +2011_002308 +2011_002312 +2011_002317 +2011_002322 +2011_002324 +2011_002325 +2011_002327 +2011_002330 +2011_002343 +2011_002357 +2011_002358 +2011_002362 +2011_002365 +2011_002366 +2011_002371 +2011_002379 +2011_002380 +2011_002384 +2011_002386 +2011_002391 +2011_002393 +2011_002395 +2011_002396 +2011_002406 +2011_002407 +2011_002409 +2011_002414 +2011_002429 +2011_002453 +2011_002459 +2011_002463 +2011_002479 +2011_002482 +2011_002490 +2011_002491 +2011_002494 +2011_002495 +2011_002498 +2011_002505 +2011_002507 +2011_002509 +2011_002515 +2011_002516 +2011_002519 +2011_002520 +2011_002531 +2011_002532 +2011_002535 +2011_002536 +2011_002542 +2011_002548 +2011_002556 +2011_002558 +2011_002566 +2011_002575 +2011_002578 +2011_002579 +2011_002582 +2011_002583 +2011_002588 +2011_002589 +2011_002592 +2011_002605 +2011_002610 +2011_002612 +2011_002617 +2011_002623 +2011_002624 +2011_002629 +2011_002631 +2011_002639 +2011_002640 +2011_002641 +2011_002644 +2011_002662 +2011_002674 +2011_002675 +2011_002678 +2011_002685 +2011_002687 +2011_002694 +2011_002699 +2011_002713 +2011_002714 +2011_002725 +2011_002730 +2011_002738 +2011_002740 +2011_002742 +2011_002750 +2011_002751 +2011_002754 +2011_002760 +2011_002765 +2011_002766 +2011_002772 +2011_002784 +2011_002786 +2011_002796 +2011_002802 +2011_002805 +2011_002810 +2011_002812 +2011_002817 +2011_002830 +2011_002831 +2011_002833 +2011_002838 +2011_002841 +2011_002854 +2011_002863 +2011_002864 +2011_002868 +2011_002870 +2011_002871 +2011_002879 +2011_002880 +2011_002883 +2011_002885 +2011_002887 +2011_002890 +2011_002897 +2011_002900 +2011_002916 +2011_002925 +2011_002929 +2011_002933 +2011_002943 +2011_002944 +2011_002951 +2011_002962 +2011_002967 +2011_002970 +2011_002971 +2011_002975 +2011_002978 +2011_002983 +2011_002985 +2011_002992 +2011_002993 +2011_002994 +2011_002997 +2011_002999 +2011_003003 +2011_003011 +2011_003012 +2011_003013 +2011_003019 +2011_003023 +2011_003027 +2011_003028 +2011_003029 +2011_003030 +2011_003039 +2011_003043 +2011_003050 +2011_003055 +2011_003059 +2011_003076 +2011_003079 +2011_003085 +2011_003086 +2011_003089 +2011_003097 +2011_003098 +2011_003103 +2011_003111 +2011_003114 +2011_003115 +2011_003145 +2011_003146 +2011_003149 +2011_003152 +2011_003163 +2011_003166 +2011_003167 +2011_003168 +2011_003169 +2011_003176 +2011_003182 +2011_003185 +2011_003197 +2011_003201 +2011_003205 +2011_003207 +2011_003211 +2011_003212 +2011_003213 +2011_003220 +2011_003228 +2011_003232 +2011_003240 +2011_003242 +2011_003244 +2011_003254 +2011_003256 +2011_003260 +2011_003261 +2011_003262 +2011_003269 +2011_003271 +2011_003275 diff --git a/ImageSets/README.md b/ImageSets/README.md new file mode 100644 index 0000000..44b3a3a --- /dev/null +++ b/ImageSets/README.md @@ -0,0 +1 @@ +Please copy this folder to `$RFCN_ROOT/data/VOCdevkit0712/VOC0712` diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..317ecb6 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 Yuwen Xiong + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the 
Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..f3601c0 --- /dev/null +++ b/README.md @@ -0,0 +1,150 @@
+# py-R-FCN
+R-FCN: Object Detection via Region-based Fully Convolutional Networks
+
+py-R-FCN now supports end-to-end training.
+
+### Disclaimer
+
+The official R-FCN code (written in MATLAB) is available [here](https://github.com/daijifeng001/R-FCN).
+If your goal is to reproduce the results in the [NIPS 2016 paper](https://arxiv.org/abs/1605.06409), please use the [official code](https://github.com/daijifeng001/R-FCN).
+
+py-R-FCN is based on the [py-faster-rcnn code](https://github.com/rbgirshick/py-faster-rcnn) and the [official R-FCN implementation](https://github.com/daijifeng001/R-FCN), and its usage is quite similar to [py-faster-rcnn](https://github.com/rbgirshick/py-faster-rcnn); thanks to both projects.
+
+There are slight differences between the two implementations.
+In particular, this Python port
+ - is ~10% slower at test time, because some operations execute on the CPU in Python layers (e.g., 90ms / image vs. 99ms / image for ResNet-50)
+ - gives similar, but not exactly the same, mAP as the MATLAB version
+
+The original py-faster-rcnn uses class-aware bounding box regression, whereas R-FCN uses class-agnostic bounding box regression to reduce model complexity. I therefore added a configuration option `AGONISTIC` (note the spelling) to fast_rcnn/config.py, with a default value of False. Set it to True for both the training and test phases if you want class-agnostic training and testing.
+
+### License
+
+R-FCN is released under the MIT License (refer to the LICENSE file for details).
+
+### Citing R-FCN
+
+If you find R-FCN useful in your research, please consider citing:
+
+    @article{dai16rfcn,
+        Author = {Jifeng Dai and Yi Li and Kaiming He and Jian Sun},
+        Title = {{R-FCN}: Object Detection via Region-based Fully Convolutional Networks},
+        Journal = {arXiv preprint arXiv:1605.06409},
+        Year = {2016}
+    }
+
+### Main Results
+
+model              | training data       | test data             | mAP   | time/img (K40) | time/img (Titan X) |
+-------------------|:-------------------:|:---------------------:|:-----:|:--------------:|:------------------:|
+R-FCN, ResNet-50   | VOC 07+12 trainval  | VOC 07 test           | 76.8% | N/A            | 0.099sec           |
+R-FCN, ResNet-101  | VOC 07+12 trainval  | VOC 07 test           | N/A   | N/A            | N/A                |
+
+
+### Requirements: software
+
+0. **`Important`** Please use the [Microsoft-version Caffe (@commit 1a2be8e)](https://github.com/Microsoft/caffe/tree/1a2be8ecf9ba318d516d79187845e90ac6e73197); this Caffe supports the R-FCN layers, and the prototxt files in this repository follow the Microsoft-version Caffe's layer names. You need to put the Caffe root folder under the py-R-FCN folder, just as py-faster-rcnn does.
+
+1.
Requirements for `Caffe` and `pycaffe` (see: [Caffe installation instructions](http://caffe.berkeleyvision.org/installation.html))
+
+  **Note:** Caffe *must* be built with support for Python layers!
+
+  ```make
+  # In your Makefile.config, make sure to have this line uncommented
+  WITH_PYTHON_LAYER := 1
+  # Unrelatedly, it's also recommended that you use CUDNN
+  USE_CUDNN := 1
+  ```
+2. Python packages you might not have: `cython`, `python-opencv`, `easydict`
+3. [Optional] MATLAB is required for **official** PASCAL VOC evaluation only. The code now includes unofficial Python evaluation code.
+
+### Requirements: hardware
+
+Any NVIDIA GPU with 6GB or more memory is OK (4GB is enough for ResNet-50).
+
+### Demo
+1. I do not provide a demo currently; I'll add one soon.
+
+### Preparation for Training & Testing
+1. Download the training, validation, test data and VOCdevkit
+
+  ```Shell
+  wget http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtrainval_06-Nov-2007.tar
+  wget http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtest_06-Nov-2007.tar
+  wget http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCdevkit_08-Jun-2007.tar
+  wget http://host.robots.ox.ac.uk/pascal/VOC/voc2012/VOCtrainval_11-May-2012.tar
+  ```
+
+2. Extract all of these tars into one directory named `VOCdevkit`
+
+  ```Shell
+  tar xvf VOCtrainval_06-Nov-2007.tar
+  tar xvf VOCtest_06-Nov-2007.tar
+  tar xvf VOCdevkit_08-Jun-2007.tar
+  tar xvf VOCtrainval_11-May-2012.tar
+  ```
+
+3. It should have this basic structure
+
+  ```Shell
+  $VOCdevkit/                           # development kit
+  $VOCdevkit/VOCcode/                   # VOC utility code
+  $VOCdevkit/VOC2007                    # image sets, annotations, etc.
+  $VOCdevkit/VOC2012                    # image sets, annotations, etc.
+  # ... and several other directories ...
+  ```
+
+4. Since py-faster-rcnn does not support multiple training datasets, we need to merge the VOC 2007 and VOC 2012 data manually. Make a new directory named `VOC0712` and put every subfolder except `ImageSets` from `VOC2007` and `VOC2012` into `VOC0712` (you'll merge some folders). I have provided merged ImageSets text files for you; please put them into `VOCdevkit/VOC0712/ImageSets`.
+
+5. Then the folder structure should look like this
+
+  ```Shell
+  $VOCdevkit/                           # development kit
+  $VOCdevkit/VOCcode/                   # VOC utility code
+  $VOCdevkit/VOC2007                    # image sets, annotations, etc.
+  $VOCdevkit/VOC2012                    # image sets, annotations, etc.
+  $VOCdevkit/VOC0712                    # you just created this folder
+  # ... and several other directories ...
+  ```
+
+6. Create symlinks for the PASCAL VOC dataset
+
+  ```Shell
+  cd $RFCN_ROOT/data
+  ln -s $VOCdevkit VOCdevkit0712
+  ```
+
+7. Please download the ImageNet-pre-trained ResNet-50 and ResNet-101 models manually, and put them into `$RFCN_ROOT/data/imagenet_models`
+8. Then everything is set up, and you can train your own model.
+
+### Usage
+
+To train and test an R-FCN detector using the **approximate joint training** method, use `experiments/scripts/rfcn_end2end.sh`.
+Output is written underneath `$RFCN_ROOT/output`.
+
+To train and test an R-FCN detector using the **approximate joint training** method **with OHEM**, use `experiments/scripts/rfcn_end2end_ohem.sh`.
+Output is written underneath `$RFCN_ROOT/output`.
+
+```Shell
+cd $RFCN_ROOT
+./experiments/scripts/rfcn_end2end[_ohem].sh [GPU_ID] [NET] [--set ...]
+# GPU_ID is the GPU you want to train on
+# NET in {ResNet-50, ResNet-101} is the network arch to use
+# --set ... allows you to specify fast_rcnn.config options, e.g.
+#   --set EXP_DIR seed_rng1701 RNG_SEED 1701
+```
+
+Trained R-FCN networks are saved under:
+
+```
+output/<experiment directory>/<dataset name>/
+```
+
+Test outputs are saved under:
+
+```
+output/<experiment directory>/<dataset name>/<network snapshot name>/
+```
+
+### Misc
+
+Tested on Ubuntu 14.04 with a Titan X GPU and Intel Xeon CPU E5-2620 v2 @ 2.10GHz
+
+The py-faster-rcnn code also works properly in this repository, but I have not added any other features (such as ResNet or OHEM) to it. diff --git a/data/.gitignore b/data/.gitignore new file mode 100644 index 0000000..dd09a04 --- /dev/null +++ b/data/.gitignore @@ -0,0 +1,6 @@
+selective_search*
+imagenet_models*
+fast_rcnn_models*
+faster_rcnn_models*
+VOCdevkit*
+cache diff --git a/data/README.md b/data/README.md new file mode 100644 index 0000000..109c453 --- /dev/null +++ b/data/README.md @@ -0,0 +1,69 @@
+This directory holds (*after you download them*):
+- Caffe models pre-trained on ImageNet
+- Faster R-CNN models
+- Symlinks to datasets
+
+To download Caffe models (ZF, VGG16) pre-trained on ImageNet, run:
+
+```
+./data/scripts/fetch_imagenet_models.sh
+```
+
+This script will populate `data/imagenet_models`.
+
+To download Faster R-CNN models trained on VOC 2007, run:
+
+```
+./data/scripts/fetch_faster_rcnn_models.sh
+```
+
+This script will populate `data/faster_rcnn_models`.
+
+In order to train and test with PASCAL VOC, you will need to establish symlinks.
+From the `data` directory (`cd data`):
+
+```
+# For VOC 2007
+ln -s /your/path/to/VOC2007/VOCdevkit VOCdevkit2007
+
+# For VOC 2012
+ln -s /your/path/to/VOC2012/VOCdevkit VOCdevkit2012
+```
+
+Install the MS COCO dataset at /path/to/coco
+
+```
+ln -s /path/to/coco coco
+```
+
+For COCO with Fast R-CNN, place object proposals under `coco_proposals` (inside
+the `data` directory). You can obtain proposals on COCO from Jan Hosang at
+https://www.mpi-inf.mpg.de/departments/computer-vision-and-multimodal-computing/research/object-recognition-and-scene-understanding/how-good-are-detection-proposals-really/.
+For COCO, using MCG is recommended over selective search. MCG boxes can be downloaded
+from http://www.eecs.berkeley.edu/Research/Projects/CS/vision/grouping/mcg/.
+Use the tool `lib/datasets/tools/mcg_munge.py` to convert the downloaded MCG data
+into the same file layout as those from Jan Hosang.
+
+Since you'll likely be experimenting with multiple installs of Fast/er R-CNN in
+parallel, you'll probably want to keep all of this data in a shared place and
+use symlinks. On my system I create the following symlinks inside `data` (see the code block below, after the notes on COCO annotations):
+
+Annotations for the 5k image 'minival' subset of COCO val2014 that I like to use
+can be found at http://www.cs.berkeley.edu/~rbg/faster-rcnn-data/instances_minival2014.json.zip.
+Annotations for COCO val2014 (set) minus minival (~35k images) can be found at
+http://www.cs.berkeley.edu/~rbg/faster-rcnn-data/instances_valminusminival2014.json.zip.
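+
+To make these annotation files visible to the COCO imdb (`lib/datasets/coco.py` looks for them under `data/coco/annotations`), you could download and unpack them roughly as follows. This is only a sketch: it assumes you have already linked your COCO install at `data/coco` as described above, and that the zip archives contain the `.json` files at their top level.
+
+```
+# hypothetical example: fetch the minival annotation files and put them
+# where coco.py expects them (run from the data directory)
+wget http://www.cs.berkeley.edu/~rbg/faster-rcnn-data/instances_minival2014.json.zip
+wget http://www.cs.berkeley.edu/~rbg/faster-rcnn-data/instances_valminusminival2014.json.zip
+unzip instances_minival2014.json.zip -d coco/annotations
+unzip instances_valminusminival2014.json.zip -d coco/annotations
+```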
+ +``` +# data/cache holds various outputs created by the datasets package +ln -s /data/fast_rcnn_shared/cache + +# move the imagenet_models to shared location and symlink to them +ln -s /data/fast_rcnn_shared/imagenet_models + +# move the selective search data to a shared location and symlink to them +# (only applicable to Fast R-CNN training) +ln -s /data/fast_rcnn_shared/selective_search_data + +ln -s /data/VOC2007/VOCdevkit VOCdevkit2007 +ln -s /data/VOC2012/VOCdevkit VOCdevkit2012 +``` diff --git a/data/demo/000456.jpg b/data/demo/000456.jpg new file mode 100644 index 0000000..480afa5 Binary files /dev/null and b/data/demo/000456.jpg differ diff --git a/data/demo/000542.jpg b/data/demo/000542.jpg new file mode 100644 index 0000000..d87e919 Binary files /dev/null and b/data/demo/000542.jpg differ diff --git a/data/demo/001150.jpg b/data/demo/001150.jpg new file mode 100644 index 0000000..e8c86a4 Binary files /dev/null and b/data/demo/001150.jpg differ diff --git a/data/demo/001763.jpg b/data/demo/001763.jpg new file mode 100644 index 0000000..c80ebd9 Binary files /dev/null and b/data/demo/001763.jpg differ diff --git a/data/demo/004545.jpg b/data/demo/004545.jpg new file mode 100644 index 0000000..4e06c20 Binary files /dev/null and b/data/demo/004545.jpg differ diff --git a/data/pylintrc b/data/pylintrc new file mode 100644 index 0000000..a348aea --- /dev/null +++ b/data/pylintrc @@ -0,0 +1,3 @@ +[TYPECHECK] + +ignored-modules = numpy, numpy.random, cv2 diff --git a/data/scripts/fetch_faster_rcnn_models.sh b/data/scripts/fetch_faster_rcnn_models.sh new file mode 100755 index 0000000..019771d --- /dev/null +++ b/data/scripts/fetch_faster_rcnn_models.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../" && pwd )" +cd $DIR + +FILE=faster_rcnn_models.tgz +URL=http://www.cs.berkeley.edu/~rbg/faster-rcnn-data/$FILE +CHECKSUM=ac116844f66aefe29587214272054668 + +if [ -f $FILE ]; then + echo "File already exists. Checking md5..." + os=`uname -s` + if [ "$os" = "Linux" ]; then + checksum=`md5sum $FILE | awk '{ print $1 }'` + elif [ "$os" = "Darwin" ]; then + checksum=`cat $FILE | md5` + fi + if [ "$checksum" = "$CHECKSUM" ]; then + echo "Checksum is correct. No need to download." + exit 0 + else + echo "Checksum is incorrect. Need to download again." + fi +fi + +echo "Downloading Faster R-CNN demo models (695M)..." + +wget $URL -O $FILE + +echo "Unzipping..." + +tar zxvf $FILE + +echo "Done. Please run this command again to verify that checksum = $CHECKSUM." diff --git a/data/scripts/fetch_imagenet_models.sh b/data/scripts/fetch_imagenet_models.sh new file mode 100755 index 0000000..de18050 --- /dev/null +++ b/data/scripts/fetch_imagenet_models.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../" && pwd )" +cd $DIR + +FILE=imagenet_models.tgz +URL=http://www.cs.berkeley.edu/~rbg/faster-rcnn-data/$FILE +CHECKSUM=ed34ca912d6782edfb673a8c3a0bda6d + +if [ -f $FILE ]; then + echo "File already exists. Checking md5..." + os=`uname -s` + if [ "$os" = "Linux" ]; then + checksum=`md5sum $FILE | awk '{ print $1 }'` + elif [ "$os" = "Darwin" ]; then + checksum=`cat $FILE | md5` + fi + if [ "$checksum" = "$CHECKSUM" ]; then + echo "Checksum is correct. No need to download." + exit 0 + else + echo "Checksum is incorrect. Need to download again." + fi +fi + +echo "Downloading pretrained ImageNet models (1G)..." + +wget $URL -O $FILE + +echo "Unzipping..." + +tar zxvf $FILE + +echo "Done. 
Please run this command again to verify that checksum = $CHECKSUM." diff --git a/data/scripts/fetch_selective_search_data.sh b/data/scripts/fetch_selective_search_data.sh new file mode 100755 index 0000000..84c0aca --- /dev/null +++ b/data/scripts/fetch_selective_search_data.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../" && pwd )" +cd $DIR + +FILE=selective_search_data.tgz +URL=http://www.cs.berkeley.edu/~rbg/fast-rcnn-data/$FILE +CHECKSUM=7078c1db87a7851b31966b96774cd9b9 + +if [ -f $FILE ]; then + echo "File already exists. Checking md5..." + os=`uname -s` + if [ "$os" = "Linux" ]; then + checksum=`md5sum $FILE | awk '{ print $1 }'` + elif [ "$os" = "Darwin" ]; then + checksum=`cat $FILE | md5` + fi + if [ "$checksum" = "$CHECKSUM" ]; then + echo "Checksum is correct. No need to download." + exit 0 + else + echo "Checksum is incorrect. Need to download again." + fi +fi + +echo "Downloading precomputed selective search boxes (0.5G)..." + +wget $URL -O $FILE + +echo "Unzipping..." + +tar zxvf $FILE + +echo "Done. Please run this command again to verify that checksum = $CHECKSUM." diff --git a/experiments/README.md b/experiments/README.md new file mode 100644 index 0000000..076257d --- /dev/null +++ b/experiments/README.md @@ -0,0 +1,5 @@ +Scripts are under `experiments/scripts`. + +Each script saves a log file under `experiments/logs`. + +Configuration override files used in the experiments are stored in `experiments/cfgs`. diff --git a/experiments/cfgs/faster_rcnn_alt_opt.yml b/experiments/cfgs/faster_rcnn_alt_opt.yml new file mode 100644 index 0000000..9f11bd6 --- /dev/null +++ b/experiments/cfgs/faster_rcnn_alt_opt.yml @@ -0,0 +1,5 @@ +EXP_DIR: faster_rcnn_alt_opt +TRAIN: + BG_THRESH_LO: 0.0 +TEST: + HAS_RPN: True diff --git a/experiments/cfgs/faster_rcnn_end2end.yml b/experiments/cfgs/faster_rcnn_end2end.yml new file mode 100644 index 0000000..4c6a054 --- /dev/null +++ b/experiments/cfgs/faster_rcnn_end2end.yml @@ -0,0 +1,11 @@ +EXP_DIR: faster_rcnn_end2end +TRAIN: + HAS_RPN: True + IMS_PER_BATCH: 1 + BBOX_NORMALIZE_TARGETS_PRECOMPUTED: True + RPN_POSITIVE_OVERLAP: 0.7 + RPN_BATCHSIZE: 256 + PROPOSAL_METHOD: gt + BG_THRESH_LO: 0.0 +TEST: + HAS_RPN: True diff --git a/experiments/cfgs/rfcn_end2end.yml b/experiments/cfgs/rfcn_end2end.yml new file mode 100644 index 0000000..ebeca75 --- /dev/null +++ b/experiments/cfgs/rfcn_end2end.yml @@ -0,0 +1,15 @@ +EXP_DIR: rfcn_end2end +TRAIN: + HAS_RPN: True + IMS_PER_BATCH: 1 + BBOX_NORMALIZE_TARGETS_PRECOMPUTED: True + RPN_POSITIVE_OVERLAP: 0.7 + RPN_BATCHSIZE: 256 + PROPOSAL_METHOD: gt + BG_THRESH_LO: 0.1 + BATCH_SIZE: 128 + AGONISTIC: True + SNAPSHOT_ITERS: 10000 +TEST: + HAS_RPN: True + AGONISTIC: True diff --git a/experiments/cfgs/rfcn_end2end_ohem.yml b/experiments/cfgs/rfcn_end2end_ohem.yml new file mode 100644 index 0000000..dc00697 --- /dev/null +++ b/experiments/cfgs/rfcn_end2end_ohem.yml @@ -0,0 +1,16 @@ +EXP_DIR: rfcn_end2end_ohem +TRAIN: + HAS_RPN: True + IMS_PER_BATCH: 1 + BBOX_NORMALIZE_TARGETS_PRECOMPUTED: True + RPN_POSITIVE_OVERLAP: 0.7 + RPN_BATCHSIZE: 256 + PROPOSAL_METHOD: gt + BG_THRESH_LO: 0.0 + BATCH_SIZE: -1 + AGONISTIC: True + SNAPSHOT_ITERS: 1000 + RPN_MIN_SIZE: 16 +TEST: + HAS_RPN: True + AGONISTIC: True diff --git a/experiments/logs/.gitignore b/experiments/logs/.gitignore new file mode 100644 index 0000000..355c023 --- /dev/null +++ b/experiments/logs/.gitignore @@ -0,0 +1 @@ +*.txt* diff --git a/experiments/scripts/fast_rcnn.sh b/experiments/scripts/fast_rcnn.sh new file mode 
100755 index 0000000..9f4e72e --- /dev/null +++ b/experiments/scripts/fast_rcnn.sh @@ -0,0 +1,63 @@ +#!/bin/bash +# Usage: +# ./experiments/scripts/fast_rcnn.sh GPU NET DATASET [options args to {train,test}_net.py] +# DATASET is either pascal_voc or coco. +# +# Example: +# ./experiments/scripts/fast_rcnn.sh 0 VGG_CNN_M_1024 pascal_voc \ +# --set EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]" + +set -x +set -e + +export PYTHONUNBUFFERED="True" + +GPU_ID=$1 +NET=$2 +NET_lc=${NET,,} +DATASET=$3 + +array=( $@ ) +len=${#array[@]} +EXTRA_ARGS=${array[@]:3:$len} +EXTRA_ARGS_SLUG=${EXTRA_ARGS// /_} + +case $DATASET in + pascal_voc) + TRAIN_IMDB="voc_2007_trainval" + TEST_IMDB="voc_2007_test" + PT_DIR="pascal_voc" + ITERS=40000 + ;; + coco) + TRAIN_IMDB="coco_2014_train" + TEST_IMDB="coco_2014_minival" + PT_DIR="coco" + ITERS=280000 + ;; + *) + echo "No dataset given" + exit + ;; +esac + +LOG="experiments/logs/fast_rcnn_${NET}_${EXTRA_ARGS_SLUG}.txt.`date +'%Y-%m-%d_%H-%M-%S'`" +exec &> >(tee -a "$LOG") +echo Logging output to "$LOG" + +time ./tools/train_net.py --gpu ${GPU_ID} \ + --solver models/${PT_DIR}/${NET}/fast_rcnn/solver.prototxt \ + --weights data/imagenet_models/${NET}.v2.caffemodel \ + --imdb ${TRAIN_IMDB} \ + --iters ${ITERS} \ + ${EXTRA_ARGS} + +set +x +NET_FINAL=`grep -B 1 "done solving" ${LOG} | grep "Wrote snapshot" | awk '{print $4}'` +set -x + +time ./tools/test_net.py --gpu ${GPU_ID} \ + --def models/${PT_DIR}/${NET}/fast_rcnn/test.prototxt \ + --net ${NET_FINAL} \ + --imdb ${TEST_IMDB} \ + ${EXTRA_ARGS} diff --git a/experiments/scripts/faster_rcnn_alt_opt.sh b/experiments/scripts/faster_rcnn_alt_opt.sh new file mode 100755 index 0000000..d2a1ebe --- /dev/null +++ b/experiments/scripts/faster_rcnn_alt_opt.sh @@ -0,0 +1,62 @@ +#!/bin/bash +# Usage: +# ./experiments/scripts/faster_rcnn_alt_opt.sh GPU NET DATASET [options args to {train,test}_net.py] +# DATASET is only pascal_voc for now +# +# Example: +# ./experiments/scripts/faster_rcnn_alt_opt.sh 0 VGG_CNN_M_1024 pascal_voc \ +# --set EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]" + +set -x +set -e + +export PYTHONUNBUFFERED="True" + +GPU_ID=$1 +NET=$2 +NET_lc=${NET,,} +DATASET=$3 + +array=( $@ ) +len=${#array[@]} +EXTRA_ARGS=${array[@]:3:$len} +EXTRA_ARGS_SLUG=${EXTRA_ARGS// /_} + +case $DATASET in + pascal_voc) + TRAIN_IMDB="voc_2007_trainval" + TEST_IMDB="voc_2007_test" + PT_DIR="pascal_voc" + ITERS=40000 + ;; + coco) + echo "Not implemented: use experiments/scripts/faster_rcnn_end2end.sh for coco" + exit + ;; + *) + echo "No dataset given" + exit + ;; +esac + +LOG="experiments/logs/faster_rcnn_alt_opt_${NET}_${EXTRA_ARGS_SLUG}.txt.`date +'%Y-%m-%d_%H-%M-%S'`" +exec &> >(tee -a "$LOG") +echo Logging output to "$LOG" + +time ./tools/train_faster_rcnn_alt_opt.py --gpu ${GPU_ID} \ + --net_name ${NET} \ + --weights data/imagenet_models/${NET}.v2.caffemodel \ + --imdb ${TRAIN_IMDB} \ + --cfg experiments/cfgs/faster_rcnn_alt_opt.yml \ + ${EXTRA_ARGS} + +set +x +NET_FINAL=`grep "Final model:" ${LOG} | awk '{print $3}'` +set -x + +time ./tools/test_net.py --gpu ${GPU_ID} \ + --def models/${PT_DIR}/${NET}/faster_rcnn_alt_opt/faster_rcnn_test.pt \ + --net ${NET_FINAL} \ + --imdb ${TEST_IMDB} \ + --cfg experiments/cfgs/faster_rcnn_alt_opt.yml \ + ${EXTRA_ARGS} diff --git a/experiments/scripts/faster_rcnn_end2end.sh b/experiments/scripts/faster_rcnn_end2end.sh new file mode 100755 index 0000000..79770aa --- /dev/null +++ b/experiments/scripts/faster_rcnn_end2end.sh @@ -0,0 +1,68 @@ +#!/bin/bash +# Usage: 
+# ./experiments/scripts/faster_rcnn_end2end.sh GPU NET DATASET [options args to {train,test}_net.py] +# DATASET is either pascal_voc or coco. +# +# Example: +# ./experiments/scripts/faster_rcnn_end2end.sh 0 VGG_CNN_M_1024 pascal_voc \ +# --set EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]" + +set -x +set -e + +export PYTHONUNBUFFERED="True" + +GPU_ID=$1 +NET=$2 +NET_lc=${NET,,} +DATASET=$3 + +array=( $@ ) +len=${#array[@]} +EXTRA_ARGS=${array[@]:3:$len} +EXTRA_ARGS_SLUG=${EXTRA_ARGS// /_} + +case $DATASET in + pascal_voc) + TRAIN_IMDB="voc_2007_trainval" + TEST_IMDB="voc_2007_test" + PT_DIR="pascal_voc" + ITERS=70000 + ;; + coco) + # This is a very long and slow training schedule + # You can probably use fewer iterations and reduce the + # time to the LR drop (set in the solver to 350,000 iterations). + TRAIN_IMDB="coco_2014_train" + TEST_IMDB="coco_2014_minival" + PT_DIR="coco" + ITERS=490000 + ;; + *) + echo "No dataset given" + exit + ;; +esac + +LOG="experiments/logs/faster_rcnn_end2end_${NET}_${EXTRA_ARGS_SLUG}.txt.`date +'%Y-%m-%d_%H-%M-%S'`" +exec &> >(tee -a "$LOG") +echo Logging output to "$LOG" + +time ./tools/train_net.py --gpu ${GPU_ID} \ + --solver models/${PT_DIR}/${NET}/faster_rcnn_end2end/solver.prototxt \ + --weights data/imagenet_models/${NET}.v2.caffemodel \ + --imdb ${TRAIN_IMDB} \ + --iters ${ITERS} \ + --cfg experiments/cfgs/faster_rcnn_end2end.yml \ + ${EXTRA_ARGS} + +set +x +NET_FINAL=`grep -B 1 "done solving" ${LOG} | grep "Wrote snapshot" | awk '{print $4}'` +set -x + +time ./tools/test_net.py --gpu ${GPU_ID} \ + --def models/${PT_DIR}/${NET}/faster_rcnn_end2end/test.prototxt \ + --net ${NET_FINAL} \ + --imdb ${TEST_IMDB} \ + --cfg experiments/cfgs/faster_rcnn_end2end.yml \ + ${EXTRA_ARGS} diff --git a/experiments/scripts/rfcn_end2end.sh b/experiments/scripts/rfcn_end2end.sh new file mode 100755 index 0000000..c54f9e1 --- /dev/null +++ b/experiments/scripts/rfcn_end2end.sh @@ -0,0 +1,68 @@ +#!/bin/bash +# Usage: +# ./experiments/scripts/rfcn_end2end.sh GPU NET DATASET [options args to {train,test}_net.py] +# DATASET is either pascal_voc or coco. +# +# Example: +# ./experiments/scripts/rfcn_end2end.sh 0 ResNet50 pascal_voc \ +# --set EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]" + +set -x +set -e + +export PYTHONUNBUFFERED="True" + +GPU_ID=$1 +NET=$2 +NET_lc=${NET,,} +DATASET=$3 + +array=( $@ ) +len=${#array[@]} +EXTRA_ARGS=${array[@]:3:$len} +EXTRA_ARGS_SLUG=${EXTRA_ARGS// /_} + +case $DATASET in + pascal_voc) + TRAIN_IMDB="voc_0712_trainval" + TEST_IMDB="voc_0712_test" + PT_DIR="pascal_voc" + ITERS=120000 + ;; + coco) + # This is a very long and slow training schedule + # You can probably use fewer iterations and reduce the + # time to the LR drop (set in the solver to 350,000 iterations). 
+ TRAIN_IMDB="coco_2014_train" + TEST_IMDB="coco_2014_minival" + PT_DIR="coco" + ITERS=490000 + ;; + *) + echo "No dataset given" + exit + ;; +esac + +LOG="experiments/logs/rfcn_end2end_${NET}_${EXTRA_ARGS_SLUG}.txt.`date +'%Y-%m-%d_%H-%M-%S'`" +exec &> >(tee -a "$LOG") +echo Logging output to "$LOG" + +time ./tools/train_net.py --gpu ${GPU_ID} \ + --solver models/${PT_DIR}/${NET}/rfcn_end2end/solver.prototxt \ + --weights data/imagenet_models/${NET}-model.caffemodel \ + --imdb ${TRAIN_IMDB} \ + --iters ${ITERS} \ + --cfg experiments/cfgs/rfcn_end2end.yml \ + ${EXTRA_ARGS} + +set +x +NET_FINAL=`grep -B 1 "done solving" ${LOG} | grep "Wrote snapshot" | awk '{print $4}'` +set -x + +time ./tools/test_net.py --gpu ${GPU_ID} \ + --def models/${PT_DIR}/${NET}/rfcn_end2end/test_agonistic.prototxt \ + --net ${NET_FINAL} \ + --imdb ${TEST_IMDB} \ + --cfg experiments/cfgs/rfcn_end2end.yml \ + ${EXTRA_ARGS} diff --git a/experiments/scripts/rfcn_end2end_ohem.sh b/experiments/scripts/rfcn_end2end_ohem.sh new file mode 100755 index 0000000..ed42477 --- /dev/null +++ b/experiments/scripts/rfcn_end2end_ohem.sh @@ -0,0 +1,70 @@ +#!/bin/bash +# Usage: +# ./experiments/scripts/rfcn_end2end_ohem.sh GPU NET DATASET [options args to {train,test}_net.py] +# DATASET is either pascal_voc or coco. +# +# Example: +# ./experiments/scripts/rfcn_end2end_ohem.sh 0 ResNet50 pascal_voc \ +# --set EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]" + +set -x +set -e + +export PYTHONUNBUFFERED="True" + +GPU_ID=$1 +NET=$2 +NET_lc=${NET,,} +DATASET=$3 + +array=( $@ ) +len=${#array[@]} +EXTRA_ARGS=${array[@]:3:$len} +EXTRA_ARGS_SLUG=${EXTRA_ARGS// /_} + +case $DATASET in + pascal_voc) + TRAIN_IMDB="voc_0712_trainval" + TEST_IMDB="voc_0712_test" + PT_DIR="pascal_voc" + ITERS=120000 + ;; + coco) + # This is a very long and slow training schedule + # You can probably use fewer iterations and reduce the + # time to the LR drop (set in the solver to 350,000 iterations). + TRAIN_IMDB="coco_2014_train" + TEST_IMDB="coco_2014_minival" + PT_DIR="coco" + ITERS=490000 + ;; + *) + echo "No dataset given" + exit + ;; +esac + +LOG="experiments/logs/rfcn_end2end_${NET}_${EXTRA_ARGS_SLUG}.txt.`date +'%Y-%m-%d_%H-%M-%S'`" +exec &> >(tee -a "$LOG") +echo Logging output to "$LOG" + + +time ./tools/train_net.py --gpu ${GPU_ID} \ + --solver models/${PT_DIR}/${NET}/rfcn_end2end/solver_ohem.prototxt \ + --weights data/imagenet_models/${NET}-model.caffemodel \ + --imdb ${TRAIN_IMDB} \ + --iters ${ITERS} \ + --cfg experiments/cfgs/rfcn_end2end_ohem.yml \ + ${EXTRA_ARGS} + + +set +x +NET_FINAL=`tail -n 100 ${LOG} | grep -B 1 "done solving" | grep "Wrote snapshot" | awk '{print $4}'` +set -x + +time ./tools/test_net.py --gpu ${GPU_ID} \ + --def models/${PT_DIR}/${NET}/rfcn_end2end/test_agonistic.prototxt \ + --net ${NET_FINAL} \ + --imdb ${TEST_IMDB} \ + --cfg experiments/cfgs/rfcn_end2end_ohem.yml \ + ${EXTRA_ARGS} diff --git a/experiments/scripts/rfcn_end2end_ohem_warmup.sh b/experiments/scripts/rfcn_end2end_ohem_warmup.sh new file mode 100755 index 0000000..4efbac7 --- /dev/null +++ b/experiments/scripts/rfcn_end2end_ohem_warmup.sh @@ -0,0 +1,84 @@ +#!/bin/bash +# Usage: +# ./experiments/scripts/rfcn_end2end_ohem.sh GPU NET DATASET [options args to {train,test}_net.py] +# DATASET is either pascal_voc or coco. 
+# +# Example: +# ./experiments/scripts/rfcn_end2end_ohem.sh 0 ResNet50 pascal_voc \ +# --set EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]" + +set -x +set -e + +export PYTHONUNBUFFERED="True" + +GPU_ID=$1 +NET=$2 +NET_lc=${NET,,} +DATASET=$3 + +array=( $@ ) +len=${#array[@]} +EXTRA_ARGS=${array[@]:3:$len} +EXTRA_ARGS_SLUG=${EXTRA_ARGS// /_} + +case $DATASET in + pascal_voc) + TRAIN_IMDB="voc_0712_trainval" + TEST_IMDB="voc_0712_test" + PT_DIR="pascal_voc" + WARMUP_ITERS=10000 + ITERS=150000 + ;; + coco) + # This is a very long and slow training schedule + # You can probably use fewer iterations and reduce the + # time to the LR drop (set in the solver to 350,000 iterations). + TRAIN_IMDB="coco_2014_train" + TEST_IMDB="coco_2014_minival" + PT_DIR="coco" + WARMUP_ITERS=10000 + ITERS=480000 + ;; + *) + echo "No dataset given" + exit + ;; +esac + +LOG="experiments/logs/rfcn_end2end_${NET}_${EXTRA_ARGS_SLUG}.txt.`date +'%Y-%m-%d_%H-%M-%S'`" +exec &> >(tee -a "$LOG") +echo Logging output to "$LOG" + +time ./tools/train_net.py --gpu ${GPU_ID} \ + --solver models/${PT_DIR}/${NET}/rfcn_end2end/solver_warmup.prototxt \ + --weights data/imagenet_models/${NET}-model.caffemodel \ + --imdb ${TRAIN_IMDB} \ + --iters ${WARMUP_ITERS} \ + --cfg experiments/cfgs/rfcn_end2end_ohem.yml \ + ${EXTRA_ARGS} + +set +x +NET_CONTINUE=`tail -n 100 ${LOG} | grep -B 1 "done solving" | grep "Wrote snapshot" | awk '{print $4}'` +set -x + + +time ./tools/train_net.py --gpu ${GPU_ID} \ + --solver models/${PT_DIR}/${NET}/rfcn_end2end/solver_continue.prototxt \ + --weights ${NET_CONTINUE} \ + --imdb ${TRAIN_IMDB} \ + --iters ${ITERS} \ + --cfg experiments/cfgs/rfcn_end2end_ohem.yml \ + ${EXTRA_ARGS} + + +set +x +NET_FINAL=`tail -n 100 ${LOG} | grep -B 1 "done solving" | grep "Wrote snapshot" | awk '{print $4}'` +set -x + +time ./tools/test_net.py --gpu ${GPU_ID} \ + --def models/${PT_DIR}/${NET}/rfcn_end2end/test_agonistic.prototxt \ + --net ${NET_FINAL} \ + --imdb ${TEST_IMDB} \ + --cfg experiments/cfgs/rfcn_end2end_ohem.yml \ + ${EXTRA_ARGS} diff --git a/lib/Makefile b/lib/Makefile new file mode 100644 index 0000000..a482398 --- /dev/null +++ b/lib/Makefile @@ -0,0 +1,3 @@ +all: + python setup.py build_ext --inplace + rm -rf build diff --git a/lib/datasets/VOCdevkit-matlab-wrapper/get_voc_opts.m b/lib/datasets/VOCdevkit-matlab-wrapper/get_voc_opts.m new file mode 100644 index 0000000..629597a --- /dev/null +++ b/lib/datasets/VOCdevkit-matlab-wrapper/get_voc_opts.m @@ -0,0 +1,14 @@ +function VOCopts = get_voc_opts(path) + +tmp = pwd; +cd(path); +try + addpath('VOCcode'); + VOCinit; +catch + rmpath('VOCcode'); + cd(tmp); + error(sprintf('VOCcode directory not found under %s', path)); +end +rmpath('VOCcode'); +cd(tmp); diff --git a/lib/datasets/VOCdevkit-matlab-wrapper/voc_eval.m b/lib/datasets/VOCdevkit-matlab-wrapper/voc_eval.m new file mode 100644 index 0000000..1911a0e --- /dev/null +++ b/lib/datasets/VOCdevkit-matlab-wrapper/voc_eval.m @@ -0,0 +1,56 @@ +function res = voc_eval(path, comp_id, test_set, output_dir) + +VOCopts = get_voc_opts(path); +VOCopts.testset = test_set; + +for i = 1:length(VOCopts.classes) + cls = VOCopts.classes{i}; + res(i) = voc_eval_cls(cls, VOCopts, comp_id, output_dir); +end + +fprintf('\n~~~~~~~~~~~~~~~~~~~~\n'); +fprintf('Results:\n'); +aps = [res(:).ap]'; +fprintf('%.1f\n', aps * 100); +fprintf('%.1f\n', mean(aps) * 100); +fprintf('~~~~~~~~~~~~~~~~~~~~\n'); + +function res = voc_eval_cls(cls, VOCopts, comp_id, output_dir) + +test_set = VOCopts.testset; +year = 
VOCopts.dataset(4:end); + +addpath(fullfile(VOCopts.datadir, 'VOCcode')); + +res_fn = sprintf(VOCopts.detrespath, comp_id, cls); + +recall = []; +prec = []; +ap = 0; +ap_auc = 0; + +do_eval = (str2num(year) <= 2007) | ~strcmp(test_set, 'test'); +if do_eval + % Bug in VOCevaldet requires that tic has been called first + tic; + [recall, prec, ap] = VOCevaldet(VOCopts, comp_id, cls, true); + ap_auc = xVOCap(recall, prec); + + % force plot limits + ylim([0 1]); + xlim([0 1]); + + print(gcf, '-djpeg', '-r0', ... + [output_dir '/' cls '_pr.jpg']); +end +fprintf('!!! %s : %.4f %.4f\n', cls, ap, ap_auc); + +res.recall = recall; +res.prec = prec; +res.ap = ap; +res.ap_auc = ap_auc; + +save([output_dir '/' cls '_pr.mat'], ... + 'res', 'recall', 'prec', 'ap', 'ap_auc'); + +rmpath(fullfile(VOCopts.datadir, 'VOCcode')); diff --git a/lib/datasets/VOCdevkit-matlab-wrapper/xVOCap.m b/lib/datasets/VOCdevkit-matlab-wrapper/xVOCap.m new file mode 100644 index 0000000..de6c628 --- /dev/null +++ b/lib/datasets/VOCdevkit-matlab-wrapper/xVOCap.m @@ -0,0 +1,10 @@ +function ap = xVOCap(rec,prec) +% From the PASCAL VOC 2011 devkit + +mrec=[0 ; rec ; 1]; +mpre=[0 ; prec ; 0]; +for i=numel(mpre)-1:-1:1 + mpre(i)=max(mpre(i),mpre(i+1)); +end +i=find(mrec(2:end)~=mrec(1:end-1))+1; +ap=sum((mrec(i)-mrec(i-1)).*mpre(i)); diff --git a/lib/datasets/__init__.py b/lib/datasets/__init__.py new file mode 100644 index 0000000..7ba6a65 --- /dev/null +++ b/lib/datasets/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- diff --git a/lib/datasets/coco.py b/lib/datasets/coco.py new file mode 100644 index 0000000..bfe8ff3 --- /dev/null +++ b/lib/datasets/coco.py @@ -0,0 +1,394 @@ +# -------------------------------------------------------- +# Fast/er R-CNN +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +from datasets.imdb import imdb +import datasets.ds_utils as ds_utils +from fast_rcnn.config import cfg +import os.path as osp +import sys +import os +import numpy as np +import scipy.sparse +import scipy.io as sio +import cPickle +import json +import uuid +# COCO API +from pycocotools.coco import COCO +from pycocotools.cocoeval import COCOeval +from pycocotools import mask as COCOmask + +def _filter_crowd_proposals(roidb, crowd_thresh): + """ + Finds proposals that are inside crowd regions and marks them with + overlap = -1 (for all gt rois), which means they will be excluded from + training. 
+ """ + for ix, entry in enumerate(roidb): + overlaps = entry['gt_overlaps'].toarray() + crowd_inds = np.where(overlaps.max(axis=1) == -1)[0] + non_gt_inds = np.where(entry['gt_classes'] == 0)[0] + if len(crowd_inds) == 0 or len(non_gt_inds) == 0: + continue + iscrowd = [int(True) for _ in xrange(len(crowd_inds))] + crowd_boxes = ds_utils.xyxy_to_xywh(entry['boxes'][crowd_inds, :]) + non_gt_boxes = ds_utils.xyxy_to_xywh(entry['boxes'][non_gt_inds, :]) + ious = COCOmask.iou(non_gt_boxes, crowd_boxes, iscrowd) + bad_inds = np.where(ious.max(axis=1) > crowd_thresh)[0] + overlaps[non_gt_inds[bad_inds], :] = -1 + roidb[ix]['gt_overlaps'] = scipy.sparse.csr_matrix(overlaps) + return roidb + +class coco(imdb): + def __init__(self, image_set, year): + imdb.__init__(self, 'coco_' + year + '_' + image_set) + # COCO specific config options + self.config = {'top_k' : 2000, + 'use_salt' : True, + 'cleanup' : True, + 'crowd_thresh' : 0.7, + 'min_size' : 2} + # name, paths + self._year = year + self._image_set = image_set + self._data_path = osp.join(cfg.DATA_DIR, 'coco') + # load COCO API, classes, class <-> id mappings + self._COCO = COCO(self._get_ann_file()) + cats = self._COCO.loadCats(self._COCO.getCatIds()) + self._classes = tuple(['__background__'] + [c['name'] for c in cats]) + self._class_to_ind = dict(zip(self.classes, xrange(self.num_classes))) + self._class_to_coco_cat_id = dict(zip([c['name'] for c in cats], + self._COCO.getCatIds())) + self._image_index = self._load_image_set_index() + # Default to roidb handler + self.set_proposal_method('selective_search') + self.competition_mode(False) + + # Some image sets are "views" (i.e. subsets) into others. + # For example, minival2014 is a random 5000 image subset of val2014. + # This mapping tells us where the view's images and proposals come from. + self._view_map = { + 'minival2014' : 'val2014', # 5k val2014 subset + 'valminusminival2014' : 'val2014', # val2014 \setminus minival2014 + } + coco_name = image_set + year # e.g., "val2014" + self._data_name = (self._view_map[coco_name] + if self._view_map.has_key(coco_name) + else coco_name) + # Dataset splits that have ground-truth annotations (test splits + # do not have gt annotations) + self._gt_splits = ('train', 'val', 'minival') + + def _get_ann_file(self): + prefix = 'instances' if self._image_set.find('test') == -1 \ + else 'image_info' + return osp.join(self._data_path, 'annotations', + prefix + '_' + self._image_set + self._year + '.json') + + def _load_image_set_index(self): + """ + Load image ids. + """ + image_ids = self._COCO.getImgIds() + return image_ids + + def _get_widths(self): + anns = self._COCO.loadImgs(self._image_index) + widths = [ann['width'] for ann in anns] + return widths + + def image_path_at(self, i): + """ + Return the absolute path to image i in the image sequence. + """ + return self.image_path_from_index(self._image_index[i]) + + def image_path_from_index(self, index): + """ + Construct an image path from the image's "index" identifier. 
+ """ + # Example image path for index=119993: + # images/train2014/COCO_train2014_000000119993.jpg + file_name = ('COCO_' + self._data_name + '_' + + str(index).zfill(12) + '.jpg') + image_path = osp.join(self._data_path, 'images', + self._data_name, file_name) + assert osp.exists(image_path), \ + 'Path does not exist: {}'.format(image_path) + return image_path + + def selective_search_roidb(self): + return self._roidb_from_proposals('selective_search') + + def edge_boxes_roidb(self): + return self._roidb_from_proposals('edge_boxes_AR') + + def mcg_roidb(self): + return self._roidb_from_proposals('MCG') + + def _roidb_from_proposals(self, method): + """ + Creates a roidb from pre-computed proposals of a particular methods. + """ + top_k = self.config['top_k'] + cache_file = osp.join(self.cache_path, self.name + + '_{:s}_top{:d}'.format(method, top_k) + + '_roidb.pkl') + + if osp.exists(cache_file): + with open(cache_file, 'rb') as fid: + roidb = cPickle.load(fid) + print '{:s} {:s} roidb loaded from {:s}'.format(self.name, method, + cache_file) + return roidb + + if self._image_set in self._gt_splits: + gt_roidb = self.gt_roidb() + method_roidb = self._load_proposals(method, gt_roidb) + roidb = imdb.merge_roidbs(gt_roidb, method_roidb) + # Make sure we don't use proposals that are contained in crowds + roidb = _filter_crowd_proposals(roidb, self.config['crowd_thresh']) + else: + roidb = self._load_proposals(method, None) + with open(cache_file, 'wb') as fid: + cPickle.dump(roidb, fid, cPickle.HIGHEST_PROTOCOL) + print 'wrote {:s} roidb to {:s}'.format(method, cache_file) + return roidb + + def _load_proposals(self, method, gt_roidb): + """ + Load pre-computed proposals in the format provided by Jan Hosang: + http://www.mpi-inf.mpg.de/departments/computer-vision-and-multimodal- + computing/research/object-recognition-and-scene-understanding/how- + good-are-detection-proposals-really/ + For MCG, use boxes from http://www.eecs.berkeley.edu/Research/Projects/ + CS/vision/grouping/mcg/ and convert the file layout using + lib/datasets/tools/mcg_munge.py. + """ + box_list = [] + top_k = self.config['top_k'] + valid_methods = [ + 'MCG', + 'selective_search', + 'edge_boxes_AR', + 'edge_boxes_70'] + assert method in valid_methods + + print 'Loading {} boxes'.format(method) + for i, index in enumerate(self._image_index): + if i % 1000 == 0: + print '{:d} / {:d}'.format(i + 1, len(self._image_index)) + + box_file = osp.join( + cfg.DATA_DIR, 'coco_proposals', method, 'mat', + self._get_box_file(index)) + + raw_data = sio.loadmat(box_file)['boxes'] + boxes = np.maximum(raw_data - 1, 0).astype(np.uint16) + if method == 'MCG': + # Boxes from the MCG website are in (y1, x1, y2, x2) order + boxes = boxes[:, (1, 0, 3, 2)] + # Remove duplicate boxes and very small boxes and then take top k + keep = ds_utils.unique_boxes(boxes) + boxes = boxes[keep, :] + keep = ds_utils.filter_small_boxes(boxes, self.config['min_size']) + boxes = boxes[keep, :] + boxes = boxes[:top_k, :] + box_list.append(boxes) + # Sanity check + im_ann = self._COCO.loadImgs(index)[0] + width = im_ann['width'] + height = im_ann['height'] + ds_utils.validate_boxes(boxes, width=width, height=height) + return self.create_roidb_from_box_list(box_list, gt_roidb) + + def gt_roidb(self): + """ + Return the database of ground-truth regions of interest. + This function loads/saves from/to a cache file to speed up future calls. 
+ """ + cache_file = osp.join(self.cache_path, self.name + '_gt_roidb.pkl') + if osp.exists(cache_file): + with open(cache_file, 'rb') as fid: + roidb = cPickle.load(fid) + print '{} gt roidb loaded from {}'.format(self.name, cache_file) + return roidb + + gt_roidb = [self._load_coco_annotation(index) + for index in self._image_index] + + with open(cache_file, 'wb') as fid: + cPickle.dump(gt_roidb, fid, cPickle.HIGHEST_PROTOCOL) + print 'wrote gt roidb to {}'.format(cache_file) + return gt_roidb + + def _load_coco_annotation(self, index): + """ + Loads COCO bounding-box instance annotations. Crowd instances are + handled by marking their overlaps (with all categories) to -1. This + overlap value means that crowd "instances" are excluded from training. + """ + im_ann = self._COCO.loadImgs(index)[0] + width = im_ann['width'] + height = im_ann['height'] + + annIds = self._COCO.getAnnIds(imgIds=index, iscrowd=None) + objs = self._COCO.loadAnns(annIds) + # Sanitize bboxes -- some are invalid + valid_objs = [] + for obj in objs: + x1 = np.max((0, obj['bbox'][0])) + y1 = np.max((0, obj['bbox'][1])) + x2 = np.min((width - 1, x1 + np.max((0, obj['bbox'][2] - 1)))) + y2 = np.min((height - 1, y1 + np.max((0, obj['bbox'][3] - 1)))) + if obj['area'] > 0 and x2 >= x1 and y2 >= y1: + obj['clean_bbox'] = [x1, y1, x2, y2] + valid_objs.append(obj) + objs = valid_objs + num_objs = len(objs) + + boxes = np.zeros((num_objs, 4), dtype=np.uint16) + gt_classes = np.zeros((num_objs), dtype=np.int32) + overlaps = np.zeros((num_objs, self.num_classes), dtype=np.float32) + seg_areas = np.zeros((num_objs), dtype=np.float32) + + # Lookup table to map from COCO category ids to our internal class + # indices + coco_cat_id_to_class_ind = dict([(self._class_to_coco_cat_id[cls], + self._class_to_ind[cls]) + for cls in self._classes[1:]]) + + for ix, obj in enumerate(objs): + cls = coco_cat_id_to_class_ind[obj['category_id']] + boxes[ix, :] = obj['clean_bbox'] + gt_classes[ix] = cls + seg_areas[ix] = obj['area'] + if obj['iscrowd']: + # Set overlap to -1 for all classes for crowd objects + # so they will be excluded during training + overlaps[ix, :] = -1.0 + else: + overlaps[ix, cls] = 1.0 + + ds_utils.validate_boxes(boxes, width=width, height=height) + overlaps = scipy.sparse.csr_matrix(overlaps) + return {'boxes' : boxes, + 'gt_classes': gt_classes, + 'gt_overlaps' : overlaps, + 'flipped' : False, + 'seg_areas' : seg_areas} + + def _get_box_file(self, index): + # first 14 chars / first 22 chars / all chars + .mat + # COCO_val2014_0/COCO_val2014_000000447/COCO_val2014_000000447991.mat + file_name = ('COCO_' + self._data_name + + '_' + str(index).zfill(12) + '.mat') + return osp.join(file_name[:14], file_name[:22], file_name) + + def _print_detection_eval_metrics(self, coco_eval): + IoU_lo_thresh = 0.5 + IoU_hi_thresh = 0.95 + def _get_thr_ind(coco_eval, thr): + ind = np.where((coco_eval.params.iouThrs > thr - 1e-5) & + (coco_eval.params.iouThrs < thr + 1e-5))[0][0] + iou_thr = coco_eval.params.iouThrs[ind] + assert np.isclose(iou_thr, thr) + return ind + + ind_lo = _get_thr_ind(coco_eval, IoU_lo_thresh) + ind_hi = _get_thr_ind(coco_eval, IoU_hi_thresh) + # precision has dims (iou, recall, cls, area range, max dets) + # area range index 0: all area ranges + # max dets index 2: 100 per image + precision = \ + coco_eval.eval['precision'][ind_lo:(ind_hi + 1), :, :, 0, 2] + ap_default = np.mean(precision[precision > -1]) + print ('~~~~ Mean and per-category AP @ IoU=[{:.2f},{:.2f}] ' + '~~~~').format(IoU_lo_thresh, 
IoU_hi_thresh) + print '{:.1f}'.format(100 * ap_default) + for cls_ind, cls in enumerate(self.classes): + if cls == '__background__': + continue + # minus 1 because of __background__ + precision = coco_eval.eval['precision'][ind_lo:(ind_hi + 1), :, cls_ind - 1, 0, 2] + ap = np.mean(precision[precision > -1]) + print '{:.1f}'.format(100 * ap) + + print '~~~~ Summary metrics ~~~~' + coco_eval.summarize() + + def _do_detection_eval(self, res_file, output_dir): + ann_type = 'bbox' + coco_dt = self._COCO.loadRes(res_file) + coco_eval = COCOeval(self._COCO, coco_dt) + coco_eval.params.useSegm = (ann_type == 'segm') + coco_eval.evaluate() + coco_eval.accumulate() + self._print_detection_eval_metrics(coco_eval) + eval_file = osp.join(output_dir, 'detection_results.pkl') + with open(eval_file, 'wb') as fid: + cPickle.dump(coco_eval, fid, cPickle.HIGHEST_PROTOCOL) + print 'Wrote COCO eval results to: {}'.format(eval_file) + + def _coco_results_one_category(self, boxes, cat_id): + results = [] + for im_ind, index in enumerate(self.image_index): + dets = boxes[im_ind].astype(np.float) + if dets == []: + continue + scores = dets[:, -1] + xs = dets[:, 0] + ys = dets[:, 1] + ws = dets[:, 2] - xs + 1 + hs = dets[:, 3] - ys + 1 + results.extend( + [{'image_id' : index, + 'category_id' : cat_id, + 'bbox' : [xs[k], ys[k], ws[k], hs[k]], + 'score' : scores[k]} for k in xrange(dets.shape[0])]) + return results + + def _write_coco_results_file(self, all_boxes, res_file): + # [{"image_id": 42, + # "category_id": 18, + # "bbox": [258.15,41.29,348.26,243.78], + # "score": 0.236}, ...] + results = [] + for cls_ind, cls in enumerate(self.classes): + if cls == '__background__': + continue + print 'Collecting {} results ({:d}/{:d})'.format(cls, cls_ind, + self.num_classes - 1) + coco_cat_id = self._class_to_coco_cat_id[cls] + results.extend(self._coco_results_one_category(all_boxes[cls_ind], + coco_cat_id)) + print 'Writing results json to {}'.format(res_file) + with open(res_file, 'w') as fid: + json.dump(results, fid) + + def evaluate_detections(self, all_boxes, output_dir): + res_file = osp.join(output_dir, ('detections_' + + self._image_set + + self._year + + '_results')) + if self.config['use_salt']: + res_file += '_{}'.format(str(uuid.uuid4())) + res_file += '.json' + self._write_coco_results_file(all_boxes, res_file) + # Only do evaluation on non-test sets + if self._image_set.find('test') == -1: + self._do_detection_eval(res_file, output_dir) + # Optionally cleanup results json file + if self.config['cleanup']: + os.remove(res_file) + + def competition_mode(self, on): + if on: + self.config['use_salt'] = False + self.config['cleanup'] = False + else: + self.config['use_salt'] = True + self.config['cleanup'] = True diff --git a/lib/datasets/ds_utils.py b/lib/datasets/ds_utils.py new file mode 100644 index 0000000..f66a7f6 --- /dev/null +++ b/lib/datasets/ds_utils.py @@ -0,0 +1,41 @@ +# -------------------------------------------------------- +# Fast/er R-CNN +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import numpy as np + +def unique_boxes(boxes, scale=1.0): + """Return indices of unique boxes.""" + v = np.array([1, 1e3, 1e6, 1e9]) + hashes = np.round(boxes * scale).dot(v) + _, index = np.unique(hashes, return_index=True) + return np.sort(index) + +def xywh_to_xyxy(boxes): + """Convert [x y w h] box format to [x1 y1 x2 y2] format.""" + return np.hstack((boxes[:, 0:2], boxes[:, 0:2] + boxes[:, 2:4] - 1)) + 
+def xyxy_to_xywh(boxes): + """Convert [x1 y1 x2 y2] box format to [x y w h] format.""" + return np.hstack((boxes[:, 0:2], boxes[:, 2:4] - boxes[:, 0:2] + 1)) + +def validate_boxes(boxes, width=0, height=0): + """Check that a set of boxes are valid.""" + x1 = boxes[:, 0] + y1 = boxes[:, 1] + x2 = boxes[:, 2] + y2 = boxes[:, 3] + assert (x1 >= 0).all() + assert (y1 >= 0).all() + assert (x2 >= x1).all() + assert (y2 >= y1).all() + assert (x2 < width).all() + assert (y2 < height).all() + +def filter_small_boxes(boxes, min_size): + w = boxes[:, 2] - boxes[:, 0] + h = boxes[:, 3] - boxes[:, 1] + keep = np.where((w >= min_size) & (h > min_size))[0] + return keep diff --git a/lib/datasets/factory.py b/lib/datasets/factory.py new file mode 100644 index 0000000..339ea13 --- /dev/null +++ b/lib/datasets/factory.py @@ -0,0 +1,43 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Factory method for easily getting imdbs by name.""" + +__sets = {} + +from datasets.pascal_voc import pascal_voc +from datasets.coco import coco +import numpy as np + +# Set up voc__ using selective search "fast" mode +for year in ['2007', '2012', '0712']: + for split in ['train', 'val', 'trainval', 'test']: + name = 'voc_{}_{}'.format(year, split) + __sets[name] = (lambda split=split, year=year: pascal_voc(split, year)) + + +# Set up coco_2014_ +for year in ['2014']: + for split in ['train', 'val', 'minival', 'valminusminival']: + name = 'coco_{}_{}'.format(year, split) + __sets[name] = (lambda split=split, year=year: coco(split, year)) + +# Set up coco_2015_ +for year in ['2015']: + for split in ['test', 'test-dev']: + name = 'coco_{}_{}'.format(year, split) + __sets[name] = (lambda split=split, year=year: coco(split, year)) + +def get_imdb(name): + """Get an imdb (image database) by name.""" + if not __sets.has_key(name): + raise KeyError('Unknown dataset: {}'.format(name)) + return __sets[name]() + +def list_imdbs(): + """List all registered imdbs.""" + return __sets.keys() diff --git a/lib/datasets/imdb.py b/lib/datasets/imdb.py new file mode 100644 index 0000000..b56bf0a --- /dev/null +++ b/lib/datasets/imdb.py @@ -0,0 +1,253 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import os +import os.path as osp +import PIL +from utils.cython_bbox import bbox_overlaps +import numpy as np +import scipy.sparse +from fast_rcnn.config import cfg + +class imdb(object): + """Image database.""" + + def __init__(self, name): + self._name = name + self._num_classes = 0 + self._classes = [] + self._image_index = [] + self._obj_proposer = 'selective_search' + self._roidb = None + self._roidb_handler = self.default_roidb + # Use this dict for storing dataset specific config options + self.config = {} + + @property + def name(self): + return self._name + + @property + def num_classes(self): + return len(self._classes) + + @property + def classes(self): + return self._classes + + @property + def image_index(self): + return self._image_index + + @property + def roidb_handler(self): + return self._roidb_handler + + @roidb_handler.setter + def roidb_handler(self, val): + self._roidb_handler = val + + def 
set_proposal_method(self, method): + method = eval('self.' + method + '_roidb') + self.roidb_handler = method + + @property + def roidb(self): + # A roidb is a list of dictionaries, each with the following keys: + # boxes + # gt_overlaps + # gt_classes + # flipped + if self._roidb is not None: + return self._roidb + self._roidb = self.roidb_handler() + return self._roidb + + @property + def cache_path(self): + cache_path = osp.abspath(osp.join(cfg.DATA_DIR, 'cache')) + if not os.path.exists(cache_path): + os.makedirs(cache_path) + return cache_path + + @property + def num_images(self): + return len(self.image_index) + + def image_path_at(self, i): + raise NotImplementedError + + def default_roidb(self): + raise NotImplementedError + + def evaluate_detections(self, all_boxes, output_dir=None): + """ + all_boxes is a list of length number-of-classes. + Each list element is a list of length number-of-images. + Each of those list elements is either an empty list [] + or a numpy array of detection. + + all_boxes[class][image] = [] or np.array of shape #dets x 5 + """ + raise NotImplementedError + + def _get_widths(self): + return [PIL.Image.open(self.image_path_at(i)).size[0] + for i in xrange(self.num_images)] + + def append_flipped_images(self): + num_images = self.num_images + widths = self._get_widths() + for i in xrange(num_images): + boxes = self.roidb[i]['boxes'].copy() + oldx1 = boxes[:, 0].copy() + oldx2 = boxes[:, 2].copy() + boxes[:, 0] = widths[i] - oldx2 - 1 + boxes[:, 2] = widths[i] - oldx1 - 1 + assert (boxes[:, 2] >= boxes[:, 0]).all() + entry = {'boxes' : boxes, + 'gt_overlaps' : self.roidb[i]['gt_overlaps'], + 'gt_classes' : self.roidb[i]['gt_classes'], + 'flipped' : True} + self.roidb.append(entry) + self._image_index = self._image_index * 2 + + def evaluate_recall(self, candidate_boxes=None, thresholds=None, + area='all', limit=None): + """Evaluate detection proposal recall metrics. 
+ + Returns: + results: dictionary of results with keys + 'ar': average recall + 'recalls': vector recalls at each IoU overlap threshold + 'thresholds': vector of IoU overlap thresholds + 'gt_overlaps': vector of all ground-truth overlaps + """ + # Record max overlap value for each gt box + # Return vector of overlap values + areas = { 'all': 0, 'small': 1, 'medium': 2, 'large': 3, + '96-128': 4, '128-256': 5, '256-512': 6, '512-inf': 7} + area_ranges = [ [0**2, 1e5**2], # all + [0**2, 32**2], # small + [32**2, 96**2], # medium + [96**2, 1e5**2], # large + [96**2, 128**2], # 96-128 + [128**2, 256**2], # 128-256 + [256**2, 512**2], # 256-512 + [512**2, 1e5**2], # 512-inf + ] + assert areas.has_key(area), 'unknown area range: {}'.format(area) + area_range = area_ranges[areas[area]] + gt_overlaps = np.zeros(0) + num_pos = 0 + for i in xrange(self.num_images): + # Checking for max_overlaps == 1 avoids including crowd annotations + # (...pretty hacking :/) + max_gt_overlaps = self.roidb[i]['gt_overlaps'].toarray().max(axis=1) + gt_inds = np.where((self.roidb[i]['gt_classes'] > 0) & + (max_gt_overlaps == 1))[0] + gt_boxes = self.roidb[i]['boxes'][gt_inds, :] + gt_areas = self.roidb[i]['seg_areas'][gt_inds] + valid_gt_inds = np.where((gt_areas >= area_range[0]) & + (gt_areas <= area_range[1]))[0] + gt_boxes = gt_boxes[valid_gt_inds, :] + num_pos += len(valid_gt_inds) + + if candidate_boxes is None: + # If candidate_boxes is not supplied, the default is to use the + # non-ground-truth boxes from this roidb + non_gt_inds = np.where(self.roidb[i]['gt_classes'] == 0)[0] + boxes = self.roidb[i]['boxes'][non_gt_inds, :] + else: + boxes = candidate_boxes[i] + if boxes.shape[0] == 0: + continue + if limit is not None and boxes.shape[0] > limit: + boxes = boxes[:limit, :] + + overlaps = bbox_overlaps(boxes.astype(np.float), + gt_boxes.astype(np.float)) + + _gt_overlaps = np.zeros((gt_boxes.shape[0])) + for j in xrange(gt_boxes.shape[0]): + # find which proposal box maximally covers each gt box + argmax_overlaps = overlaps.argmax(axis=0) + # and get the iou amount of coverage for each gt box + max_overlaps = overlaps.max(axis=0) + # find which gt box is 'best' covered (i.e. 
'best' = most iou) + gt_ind = max_overlaps.argmax() + gt_ovr = max_overlaps.max() + assert(gt_ovr >= 0) + # find the proposal box that covers the best covered gt box + box_ind = argmax_overlaps[gt_ind] + # record the iou coverage of this gt box + _gt_overlaps[j] = overlaps[box_ind, gt_ind] + assert(_gt_overlaps[j] == gt_ovr) + # mark the proposal box and the gt box as used + overlaps[box_ind, :] = -1 + overlaps[:, gt_ind] = -1 + # append recorded iou coverage level + gt_overlaps = np.hstack((gt_overlaps, _gt_overlaps)) + + gt_overlaps = np.sort(gt_overlaps) + if thresholds is None: + step = 0.05 + thresholds = np.arange(0.5, 0.95 + 1e-5, step) + recalls = np.zeros_like(thresholds) + # compute recall for each iou threshold + for i, t in enumerate(thresholds): + recalls[i] = (gt_overlaps >= t).sum() / float(num_pos) + # ar = 2 * np.trapz(recalls, thresholds) + ar = recalls.mean() + return {'ar': ar, 'recalls': recalls, 'thresholds': thresholds, + 'gt_overlaps': gt_overlaps} + + def create_roidb_from_box_list(self, box_list, gt_roidb): + assert len(box_list) == self.num_images, \ + 'Number of boxes must match number of ground-truth images' + roidb = [] + for i in xrange(self.num_images): + boxes = box_list[i] + num_boxes = boxes.shape[0] + overlaps = np.zeros((num_boxes, self.num_classes), dtype=np.float32) + + if gt_roidb is not None and gt_roidb[i]['boxes'].size > 0: + gt_boxes = gt_roidb[i]['boxes'] + gt_classes = gt_roidb[i]['gt_classes'] + gt_overlaps = bbox_overlaps(boxes.astype(np.float), + gt_boxes.astype(np.float)) + argmaxes = gt_overlaps.argmax(axis=1) + maxes = gt_overlaps.max(axis=1) + I = np.where(maxes > 0)[0] + overlaps[I, gt_classes[argmaxes[I]]] = maxes[I] + + overlaps = scipy.sparse.csr_matrix(overlaps) + roidb.append({ + 'boxes' : boxes, + 'gt_classes' : np.zeros((num_boxes,), dtype=np.int32), + 'gt_overlaps' : overlaps, + 'flipped' : False, + 'seg_areas' : np.zeros((num_boxes,), dtype=np.float32), + }) + return roidb + + @staticmethod + def merge_roidbs(a, b): + assert len(a) == len(b) + for i in xrange(len(a)): + a[i]['boxes'] = np.vstack((a[i]['boxes'], b[i]['boxes'])) + a[i]['gt_classes'] = np.hstack((a[i]['gt_classes'], + b[i]['gt_classes'])) + a[i]['gt_overlaps'] = scipy.sparse.vstack([a[i]['gt_overlaps'], + b[i]['gt_overlaps']]) + a[i]['seg_areas'] = np.hstack((a[i]['seg_areas'], + b[i]['seg_areas'])) + return a + + def competition_mode(self, on): + """Turn competition mode on or off.""" + pass diff --git a/lib/datasets/pascal_voc.py b/lib/datasets/pascal_voc.py new file mode 100644 index 0000000..b55f2f6 --- /dev/null +++ b/lib/datasets/pascal_voc.py @@ -0,0 +1,344 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import os +from datasets.imdb import imdb +import datasets.ds_utils as ds_utils +import xml.etree.ElementTree as ET +import numpy as np +import scipy.sparse +import scipy.io as sio +import utils.cython_bbox +import cPickle +import subprocess +import uuid +from voc_eval import voc_eval +from fast_rcnn.config import cfg + +class pascal_voc(imdb): + def __init__(self, image_set, year, devkit_path=None): + imdb.__init__(self, 'voc_' + year + '_' + image_set) + self._year = year + self._image_set = image_set + self._devkit_path = self._get_default_path() if devkit_path is None \ + else devkit_path + self._data_path = os.path.join(self._devkit_path, 'VOC' 
+ self._year) + self._classes = ('__background__', # always index 0 + 'aeroplane', 'bicycle', 'bird', 'boat', + 'bottle', 'bus', 'car', 'cat', 'chair', + 'cow', 'diningtable', 'dog', 'horse', + 'motorbike', 'person', 'pottedplant', + 'sheep', 'sofa', 'train', 'tvmonitor') + self._class_to_ind = dict(zip(self.classes, xrange(self.num_classes))) + self._image_ext = '.jpg' + self._image_index = self._load_image_set_index() + # Default to roidb handler + self._roidb_handler = self.selective_search_roidb + self._salt = str(uuid.uuid4()) + self._comp_id = 'comp4' + + # PASCAL specific config options + self.config = {'cleanup' : True, + 'use_salt' : True, + 'use_diff' : False, + 'matlab_eval' : False, + 'rpn_file' : None, + 'min_size' : 2} + + assert os.path.exists(self._devkit_path), \ + 'VOCdevkit path does not exist: {}'.format(self._devkit_path) + assert os.path.exists(self._data_path), \ + 'Path does not exist: {}'.format(self._data_path) + + def image_path_at(self, i): + """ + Return the absolute path to image i in the image sequence. + """ + return self.image_path_from_index(self._image_index[i]) + + def image_path_from_index(self, index): + """ + Construct an image path from the image's "index" identifier. + """ + image_path = os.path.join(self._data_path, 'JPEGImages', + index + self._image_ext) + assert os.path.exists(image_path), \ + 'Path does not exist: {}'.format(image_path) + return image_path + + def _load_image_set_index(self): + """ + Load the indexes listed in this dataset's image set file. + """ + # Example path to image set file: + # self._devkit_path + /VOCdevkit2007/VOC2007/ImageSets/Main/val.txt + image_set_file = os.path.join(self._data_path, 'ImageSets', 'Main', + self._image_set + '.txt') + assert os.path.exists(image_set_file), \ + 'Path does not exist: {}'.format(image_set_file) + with open(image_set_file) as f: + image_index = [x.strip() for x in f.readlines()] + return image_index + + def _get_default_path(self): + """ + Return the default path where PASCAL VOC is expected to be installed. + """ + return os.path.join(cfg.DATA_DIR, 'VOCdevkit' + self._year) + + def gt_roidb(self): + """ + Return the database of ground-truth regions of interest. + + This function loads/saves from/to a cache file to speed up future calls. + """ + cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl') + if os.path.exists(cache_file): + with open(cache_file, 'rb') as fid: + roidb = cPickle.load(fid) + print '{} gt roidb loaded from {}'.format(self.name, cache_file) + return roidb + + gt_roidb = [self._load_pascal_annotation(index) + for index in self.image_index] + with open(cache_file, 'wb') as fid: + cPickle.dump(gt_roidb, fid, cPickle.HIGHEST_PROTOCOL) + print 'wrote gt roidb to {}'.format(cache_file) + + return gt_roidb + + def selective_search_roidb(self): + """ + Return the database of selective search regions of interest. + Ground-truth ROIs are also included. + + This function loads/saves from/to a cache file to speed up future calls. 
+ """ + cache_file = os.path.join(self.cache_path, + self.name + '_selective_search_roidb.pkl') + + if os.path.exists(cache_file): + with open(cache_file, 'rb') as fid: + roidb = cPickle.load(fid) + print '{} ss roidb loaded from {}'.format(self.name, cache_file) + return roidb + + if int(self._year) == 2007 or self._image_set != 'test': + gt_roidb = self.gt_roidb() + ss_roidb = self._load_selective_search_roidb(gt_roidb) + roidb = imdb.merge_roidbs(gt_roidb, ss_roidb) + else: + roidb = self._load_selective_search_roidb(None) + with open(cache_file, 'wb') as fid: + cPickle.dump(roidb, fid, cPickle.HIGHEST_PROTOCOL) + print 'wrote ss roidb to {}'.format(cache_file) + + return roidb + + def rpn_roidb(self): + if int(self._year) == 2007 or self._image_set != 'test': + gt_roidb = self.gt_roidb() + rpn_roidb = self._load_rpn_roidb(gt_roidb) + roidb = imdb.merge_roidbs(gt_roidb, rpn_roidb) + else: + roidb = self._load_rpn_roidb(None) + + return roidb + + def _load_rpn_roidb(self, gt_roidb): + filename = self.config['rpn_file'] + print 'loading {}'.format(filename) + assert os.path.exists(filename), \ + 'rpn data not found at: {}'.format(filename) + with open(filename, 'rb') as f: + box_list = cPickle.load(f) + return self.create_roidb_from_box_list(box_list, gt_roidb) + + def _load_selective_search_roidb(self, gt_roidb): + filename = os.path.abspath(os.path.join(cfg.DATA_DIR, + 'selective_search_data', + self.name + '.mat')) + assert os.path.exists(filename), \ + 'Selective search data not found at: {}'.format(filename) + raw_data = sio.loadmat(filename)['boxes'].ravel() + + box_list = [] + for i in xrange(raw_data.shape[0]): + boxes = raw_data[i][:, (1, 0, 3, 2)] - 1 + keep = ds_utils.unique_boxes(boxes) + boxes = boxes[keep, :] + keep = ds_utils.filter_small_boxes(boxes, self.config['min_size']) + boxes = boxes[keep, :] + box_list.append(boxes) + + return self.create_roidb_from_box_list(box_list, gt_roidb) + + def _load_pascal_annotation(self, index): + """ + Load image and bounding boxes info from XML file in the PASCAL VOC + format. + """ + filename = os.path.join(self._data_path, 'Annotations', index + '.xml') + tree = ET.parse(filename) + objs = tree.findall('object') + if not self.config['use_diff']: + # Exclude the samples labeled as difficult + non_diff_objs = [ + obj for obj in objs if int(obj.find('difficult').text) == 0] + # if len(non_diff_objs) != len(objs): + # print 'Removed {} difficult objects'.format( + # len(objs) - len(non_diff_objs)) + objs = non_diff_objs + num_objs = len(objs) + + boxes = np.zeros((num_objs, 4), dtype=np.uint16) + gt_classes = np.zeros((num_objs), dtype=np.int32) + overlaps = np.zeros((num_objs, self.num_classes), dtype=np.float32) + # "Seg" area for pascal is just the box area + seg_areas = np.zeros((num_objs), dtype=np.float32) + + # Load object bounding boxes into a data frame. 
+ for ix, obj in enumerate(objs): + bbox = obj.find('bndbox') + # Make pixel indexes 0-based + x1 = float(bbox.find('xmin').text) - 1 + y1 = float(bbox.find('ymin').text) - 1 + x2 = float(bbox.find('xmax').text) - 1 + y2 = float(bbox.find('ymax').text) - 1 + cls = self._class_to_ind[obj.find('name').text.lower().strip()] + boxes[ix, :] = [x1, y1, x2, y2] + gt_classes[ix] = cls + overlaps[ix, cls] = 1.0 + seg_areas[ix] = (x2 - x1 + 1) * (y2 - y1 + 1) + + overlaps = scipy.sparse.csr_matrix(overlaps) + + return {'boxes' : boxes, + 'gt_classes': gt_classes, + 'gt_overlaps' : overlaps, + 'flipped' : False, + 'seg_areas' : seg_areas} + + def _get_comp_id(self): + comp_id = (self._comp_id + '_' + self._salt if self.config['use_salt'] + else self._comp_id) + return comp_id + + def _get_voc_results_file_template(self): + # VOCdevkit/results/VOC2007/Main/_det_test_aeroplane.txt + filename = self._get_comp_id() + '_det_' + self._image_set + '_{:s}.txt' + path = os.path.join( + self._devkit_path, + 'results', + 'VOC' + self._year, + 'Main', + filename) + return path + + def _write_voc_results_file(self, all_boxes): + for cls_ind, cls in enumerate(self.classes): + if cls == '__background__': + continue + print 'Writing {} VOC results file'.format(cls) + filename = self._get_voc_results_file_template().format(cls) + with open(filename, 'wt') as f: + for im_ind, index in enumerate(self.image_index): + dets = all_boxes[cls_ind][im_ind] + if dets == []: + continue + # the VOCdevkit expects 1-based indices + for k in xrange(dets.shape[0]): + f.write('{:s} {:.3f} {:.1f} {:.1f} {:.1f} {:.1f}\n'. + format(index, dets[k, -1], + dets[k, 0] + 1, dets[k, 1] + 1, + dets[k, 2] + 1, dets[k, 3] + 1)) + + def _do_python_eval(self, output_dir = 'output'): + annopath = os.path.join( + self._devkit_path, + 'VOC' + self._year, + 'Annotations', + '{:s}.xml') + imagesetfile = os.path.join( + self._devkit_path, + 'VOC' + self._year, + 'ImageSets', + 'Main', + self._image_set + '.txt') + cachedir = os.path.join(self._devkit_path, 'annotations_cache') + aps = [] + # The PASCAL VOC metric changed in 2010 + use_07_metric = True if int(self._year) < 2010 else False + print 'VOC07 metric? ' + ('Yes' if use_07_metric else 'No') + if not os.path.isdir(output_dir): + os.mkdir(output_dir) + for i, cls in enumerate(self._classes): + if cls == '__background__': + continue + filename = self._get_voc_results_file_template().format(cls) + rec, prec, ap = voc_eval( + filename, annopath, imagesetfile, cls, cachedir, ovthresh=0.5, + use_07_metric=use_07_metric) + aps += [ap] + print('AP for {} = {:.4f}'.format(cls, ap)) + with open(os.path.join(output_dir, cls + '_pr.pkl'), 'w') as f: + cPickle.dump({'rec': rec, 'prec': prec, 'ap': ap}, f) + print('Mean AP = {:.4f}'.format(np.mean(aps))) + print('~~~~~~~~') + print('Results:') + for ap in aps: + print('{:.3f}'.format(ap)) + print('{:.3f}'.format(np.mean(aps))) + print('~~~~~~~~') + print('') + print('--------------------------------------------------------------') + print('Results computed with the **unofficial** Python eval code.') + print('Results should be very close to the official MATLAB eval code.') + print('Recompute with `./tools/reval.py --matlab ...` for your paper.') + print('-- Thanks, The Management') + print('--------------------------------------------------------------') + + def _do_matlab_eval(self, output_dir='output'): + print '-----------------------------------------------------' + print 'Computing results with the official MATLAB eval code.' 
+ print '-----------------------------------------------------' + path = os.path.join(cfg.ROOT_DIR, 'lib', 'datasets', + 'VOCdevkit-matlab-wrapper') + cmd = 'cd {} && '.format(path) + cmd += '{:s} -nodisplay -nodesktop '.format(cfg.MATLAB) + cmd += '-r "dbstop if error; ' + cmd += 'voc_eval(\'{:s}\',\'{:s}\',\'{:s}\',\'{:s}\'); quit;"' \ + .format(self._devkit_path, self._get_comp_id(), + self._image_set, output_dir) + print('Running:\n{}'.format(cmd)) + status = subprocess.call(cmd, shell=True) + + def evaluate_detections(self, all_boxes, output_dir): + self._write_voc_results_file(all_boxes) + self._do_python_eval(output_dir) + if self.config['matlab_eval']: + self._do_matlab_eval(output_dir) + if self.config['cleanup']: + for cls in self._classes: + if cls == '__background__': + continue + filename = self._get_voc_results_file_template().format(cls) + os.remove(filename) + + def competition_mode(self, on): + if on: + self.config['use_salt'] = False + self.config['cleanup'] = False + else: + self.config['use_salt'] = True + self.config['cleanup'] = True + +if __name__ == '__main__': + from datasets.pascal_voc import pascal_voc + d = pascal_voc('trainval', '2007') + res = d.roidb + from IPython import embed; embed() diff --git a/lib/datasets/tools/mcg_munge.py b/lib/datasets/tools/mcg_munge.py new file mode 100644 index 0000000..1392aa3 --- /dev/null +++ b/lib/datasets/tools/mcg_munge.py @@ -0,0 +1,38 @@ +import os +import sys + +"""Hacky tool to convert file system layout of MCG boxes downloaded from +http://www.eecs.berkeley.edu/Research/Projects/CS/vision/grouping/mcg/ +so that it's consistent with those computed by Jan Hosang (see: +http://www.mpi-inf.mpg.de/departments/computer-vision-and-multimodal- + computing/research/object-recognition-and-scene-understanding/how- + good-are-detection-proposals-really/) + +NB: Boxes from the MCG website are in (y1, x1, y2, x2) order. +Boxes from Hosang et al. are in (x1, y1, x2, y2) order. 
+""" + +def munge(src_dir): + # stored as: ./MCG-COCO-val2014-boxes/COCO_val2014_000000193401.mat + # want: ./MCG/mat/COCO_val2014_0/COCO_val2014_000000141/COCO_val2014_000000141334.mat + + files = os.listdir(src_dir) + for fn in files: + base, ext = os.path.splitext(fn) + # first 14 chars / first 22 chars / all chars + .mat + # COCO_val2014_0/COCO_val2014_000000447/COCO_val2014_000000447991.mat + first = base[:14] + second = base[:22] + dst_dir = os.path.join('MCG', 'mat', first, second) + if not os.path.exists(dst_dir): + os.makedirs(dst_dir) + src = os.path.join(src_dir, fn) + dst = os.path.join(dst_dir, fn) + print 'MV: {} -> {}'.format(src, dst) + os.rename(src, dst) + +if __name__ == '__main__': + # src_dir should look something like: + # src_dir = 'MCG-COCO-val2014-boxes' + src_dir = sys.argv[1] + munge(src_dir) diff --git a/lib/datasets/voc_eval.py b/lib/datasets/voc_eval.py new file mode 100644 index 0000000..8d0a830 --- /dev/null +++ b/lib/datasets/voc_eval.py @@ -0,0 +1,200 @@ +# -------------------------------------------------------- +# Fast/er R-CNN +# Licensed under The MIT License [see LICENSE for details] +# Written by Bharath Hariharan +# -------------------------------------------------------- + +import xml.etree.ElementTree as ET +import os +import cPickle +import numpy as np + +def parse_rec(filename): + """ Parse a PASCAL VOC xml file """ + tree = ET.parse(filename) + objects = [] + for obj in tree.findall('object'): + obj_struct = {} + obj_struct['name'] = obj.find('name').text + obj_struct['pose'] = obj.find('pose').text + obj_struct['truncated'] = int(obj.find('truncated').text) + obj_struct['difficult'] = int(obj.find('difficult').text) + bbox = obj.find('bndbox') + obj_struct['bbox'] = [int(bbox.find('xmin').text), + int(bbox.find('ymin').text), + int(bbox.find('xmax').text), + int(bbox.find('ymax').text)] + objects.append(obj_struct) + + return objects + +def voc_ap(rec, prec, use_07_metric=False): + """ ap = voc_ap(rec, prec, [use_07_metric]) + Compute VOC AP given precision and recall. + If use_07_metric is true, uses the + VOC 07 11 point method (default:False). + """ + if use_07_metric: + # 11 point metric + ap = 0. + for t in np.arange(0., 1.1, 0.1): + if np.sum(rec >= t) == 0: + p = 0 + else: + p = np.max(prec[rec >= t]) + ap = ap + p / 11. + else: + # correct AP calculation + # first append sentinel values at the end + mrec = np.concatenate(([0.], rec, [1.])) + mpre = np.concatenate(([0.], prec, [0.])) + + # compute the precision envelope + for i in range(mpre.size - 1, 0, -1): + mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i]) + + # to calculate area under PR curve, look for points + # where X axis (recall) changes value + i = np.where(mrec[1:] != mrec[:-1])[0] + + # and sum (\Delta recall) * prec + ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1]) + return ap + +def voc_eval(detpath, + annopath, + imagesetfile, + classname, + cachedir, + ovthresh=0.5, + use_07_metric=False): + """rec, prec, ap = voc_eval(detpath, + annopath, + imagesetfile, + classname, + [ovthresh], + [use_07_metric]) + + Top level function that does the PASCAL VOC evaluation. + + detpath: Path to detections + detpath.format(classname) should produce the detection results file. + annopath: Path to annotations + annopath.format(imagename) should be the xml annotations file. + imagesetfile: Text file containing the list of images, one image per line. 
+ classname: Category name (duh) + cachedir: Directory for caching the annotations + [ovthresh]: Overlap threshold (default = 0.5) + [use_07_metric]: Whether to use VOC07's 11 point AP computation + (default False) + """ + # assumes detections are in detpath.format(classname) + # assumes annotations are in annopath.format(imagename) + # assumes imagesetfile is a text file with each line an image name + # cachedir caches the annotations in a pickle file + + # first load gt + if not os.path.isdir(cachedir): + os.mkdir(cachedir) + cachefile = os.path.join(cachedir, 'annots.pkl') + # read list of images + with open(imagesetfile, 'r') as f: + lines = f.readlines() + imagenames = [x.strip() for x in lines] + + if not os.path.isfile(cachefile): + # load annots + recs = {} + for i, imagename in enumerate(imagenames): + recs[imagename] = parse_rec(annopath.format(imagename)) + if i % 100 == 0: + print 'Reading annotation for {:d}/{:d}'.format( + i + 1, len(imagenames)) + # save + print 'Saving cached annotations to {:s}'.format(cachefile) + with open(cachefile, 'w') as f: + cPickle.dump(recs, f) + else: + # load + with open(cachefile, 'r') as f: + recs = cPickle.load(f) + + # extract gt objects for this class + class_recs = {} + npos = 0 + for imagename in imagenames: + R = [obj for obj in recs[imagename] if obj['name'] == classname] + bbox = np.array([x['bbox'] for x in R]) + difficult = np.array([x['difficult'] for x in R]).astype(np.bool) + det = [False] * len(R) + npos = npos + sum(~difficult) + class_recs[imagename] = {'bbox': bbox, + 'difficult': difficult, + 'det': det} + + # read dets + detfile = detpath.format(classname) + with open(detfile, 'r') as f: + lines = f.readlines() + + splitlines = [x.strip().split(' ') for x in lines] + image_ids = [x[0] for x in splitlines] + confidence = np.array([float(x[1]) for x in splitlines]) + BB = np.array([[float(z) for z in x[2:]] for x in splitlines]) + + # sort by confidence + sorted_ind = np.argsort(-confidence) + sorted_scores = np.sort(-confidence) + BB = BB[sorted_ind, :] + image_ids = [image_ids[x] for x in sorted_ind] + + # go down dets and mark TPs and FPs + nd = len(image_ids) + tp = np.zeros(nd) + fp = np.zeros(nd) + for d in range(nd): + R = class_recs[image_ids[d]] + bb = BB[d, :].astype(float) + ovmax = -np.inf + BBGT = R['bbox'].astype(float) + + if BBGT.size > 0: + # compute overlaps + # intersection + ixmin = np.maximum(BBGT[:, 0], bb[0]) + iymin = np.maximum(BBGT[:, 1], bb[1]) + ixmax = np.minimum(BBGT[:, 2], bb[2]) + iymax = np.minimum(BBGT[:, 3], bb[3]) + iw = np.maximum(ixmax - ixmin + 1., 0.) + ih = np.maximum(iymax - iymin + 1., 0.) + inters = iw * ih + + # union + uni = ((bb[2] - bb[0] + 1.) * (bb[3] - bb[1] + 1.) + + (BBGT[:, 2] - BBGT[:, 0] + 1.) * + (BBGT[:, 3] - BBGT[:, 1] + 1.) - inters) + + overlaps = inters / uni + ovmax = np.max(overlaps) + jmax = np.argmax(overlaps) + + if ovmax > ovthresh: + if not R['difficult'][jmax]: + if not R['det'][jmax]: + tp[d] = 1. + R['det'][jmax] = 1 + else: + fp[d] = 1. + else: + fp[d] = 1. 
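+    # Illustrative note (not part of the upstream patch): the block below turns
+    # the per-detection tp/fp flags into a precision/recall curve by cumulative
+    # summation. Worked example with made-up numbers and npos = 2 gt boxes:
+    #   tp = [1, 0, 1], fp = [0, 1, 0]            (detections sorted by score)
+    #   cumsum ->  tp = [1, 1, 2], fp = [0, 1, 1]
+    #   rec  = tp / npos      = [0.5, 0.5, 1.0]
+    #   prec = tp / (tp + fp) = [1.0, 0.5, 0.667]
+    # voc_ap() then integrates this curve (11-point rule for VOC07, exact area
+    # under the precision envelope otherwise).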
+ + # compute precision recall + fp = np.cumsum(fp) + tp = np.cumsum(tp) + rec = tp / float(npos) + # avoid divide by zero in case the first detection matches a difficult + # ground truth + prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps) + ap = voc_ap(rec, prec, use_07_metric) + + return rec, prec, ap diff --git a/lib/fast_rcnn/__init__.py b/lib/fast_rcnn/__init__.py new file mode 100644 index 0000000..7ba6a65 --- /dev/null +++ b/lib/fast_rcnn/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- diff --git a/lib/fast_rcnn/bbox_transform.py b/lib/fast_rcnn/bbox_transform.py new file mode 100644 index 0000000..c83109c --- /dev/null +++ b/lib/fast_rcnn/bbox_transform.py @@ -0,0 +1,75 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import numpy as np + +def bbox_transform(ex_rois, gt_rois): + ex_widths = ex_rois[:, 2] - ex_rois[:, 0] + 1.0 + ex_heights = ex_rois[:, 3] - ex_rois[:, 1] + 1.0 + ex_ctr_x = ex_rois[:, 0] + 0.5 * ex_widths + ex_ctr_y = ex_rois[:, 1] + 0.5 * ex_heights + + gt_widths = gt_rois[:, 2] - gt_rois[:, 0] + 1.0 + gt_heights = gt_rois[:, 3] - gt_rois[:, 1] + 1.0 + gt_ctr_x = gt_rois[:, 0] + 0.5 * gt_widths + gt_ctr_y = gt_rois[:, 1] + 0.5 * gt_heights + + targets_dx = (gt_ctr_x - ex_ctr_x) / ex_widths + targets_dy = (gt_ctr_y - ex_ctr_y) / ex_heights + targets_dw = np.log(gt_widths / ex_widths) + targets_dh = np.log(gt_heights / ex_heights) + + targets = np.vstack( + (targets_dx, targets_dy, targets_dw, targets_dh)).transpose() + return targets + +def bbox_transform_inv(boxes, deltas): + if boxes.shape[0] == 0: + return np.zeros((0, deltas.shape[1]), dtype=deltas.dtype) + boxes = boxes.astype(deltas.dtype, copy=False) + + widths = boxes[:, 2] - boxes[:, 0] + 1.0 + heights = boxes[:, 3] - boxes[:, 1] + 1.0 + ctr_x = boxes[:, 0] + 0.5 * widths + ctr_y = boxes[:, 1] + 0.5 * heights + + dx = deltas[:, 0::4] + dy = deltas[:, 1::4] + dw = deltas[:, 2::4] + dh = deltas[:, 3::4] + + pred_ctr_x = dx * widths[:, np.newaxis] + ctr_x[:, np.newaxis] + pred_ctr_y = dy * heights[:, np.newaxis] + ctr_y[:, np.newaxis] + pred_w = np.exp(dw) * widths[:, np.newaxis] + pred_h = np.exp(dh) * heights[:, np.newaxis] + + pred_boxes = np.zeros(deltas.shape, dtype=deltas.dtype) + # x1 + pred_boxes[:, 0::4] = pred_ctr_x - 0.5 * pred_w + # y1 + pred_boxes[:, 1::4] = pred_ctr_y - 0.5 * pred_h + # x2 + pred_boxes[:, 2::4] = pred_ctr_x + 0.5 * pred_w + # y2 + pred_boxes[:, 3::4] = pred_ctr_y + 0.5 * pred_h + + return pred_boxes + +def clip_boxes(boxes, im_shape): + """ + Clip boxes to image boundaries. 
+ """ + + # x1 >= 0 + boxes[:, 0::4] = np.maximum(np.minimum(boxes[:, 0::4], im_shape[1] - 1), 0) + # y1 >= 0 + boxes[:, 1::4] = np.maximum(np.minimum(boxes[:, 1::4], im_shape[0] - 1), 0) + # x2 < im_shape[1] + boxes[:, 2::4] = np.maximum(np.minimum(boxes[:, 2::4], im_shape[1] - 1), 0) + # y2 < im_shape[0] + boxes[:, 3::4] = np.maximum(np.minimum(boxes[:, 3::4], im_shape[0] - 1), 0) + return boxes diff --git a/lib/fast_rcnn/config.py b/lib/fast_rcnn/config.py new file mode 100644 index 0000000..1751856 --- /dev/null +++ b/lib/fast_rcnn/config.py @@ -0,0 +1,290 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Fast R-CNN config system. + +This file specifies default config options for Fast R-CNN. You should not +change values in this file. Instead, you should write a config file (in yaml) +and use cfg_from_file(yaml_file) to load it and override the default options. + +Most tools in $ROOT/tools take a --cfg option to specify an override file. + - See tools/{train,test}_net.py for example code that uses cfg_from_file() + - See experiments/cfgs/*.yml for example YAML config override files +""" + +import os +import os.path as osp +import numpy as np +# `pip install easydict` if you don't have it +from easydict import EasyDict as edict + +__C = edict() +# Consumers can get config by: +# from fast_rcnn_config import cfg +cfg = __C + +# +# Training options +# + +__C.TRAIN = edict() + +# Scales to use during training (can list multiple scales) +# Each scale is the pixel size of an image's shortest side +__C.TRAIN.SCALES = (600,) + +# Max pixel size of the longest side of a scaled input image +__C.TRAIN.MAX_SIZE = 1000 + +# Images to use per minibatch +__C.TRAIN.IMS_PER_BATCH = 2 + +# Minibatch size (number of regions of interest [ROIs]) +__C.TRAIN.BATCH_SIZE = 128 + +# Fraction of minibatch that is labeled foreground (i.e. class > 0) +__C.TRAIN.FG_FRACTION = 0.25 + +# Overlap threshold for a ROI to be considered foreground (if >= FG_THRESH) +__C.TRAIN.FG_THRESH = 0.5 + +# Overlap threshold for a ROI to be considered background (class = 0 if +# overlap in [LO, HI)) +__C.TRAIN.BG_THRESH_HI = 0.5 +__C.TRAIN.BG_THRESH_LO = 0.1 + +# Use horizontally-flipped images during training? 
+__C.TRAIN.USE_FLIPPED = True + +# Train bounding-box regressors +__C.TRAIN.BBOX_REG = True + +# Overlap required between a ROI and ground-truth box in order for that ROI to +# be used as a bounding-box regression training example +__C.TRAIN.BBOX_THRESH = 0.5 + +# Iterations between snapshots +__C.TRAIN.SNAPSHOT_ITERS = 10000 + +# solver.prototxt specifies the snapshot path prefix, this adds an optional +# infix to yield the path: [_]_iters_XYZ.caffemodel +__C.TRAIN.SNAPSHOT_INFIX = '' + +# Use a prefetch thread in roi_data_layer.layer +# So far I haven't found this useful; likely more engineering work is required +__C.TRAIN.USE_PREFETCH = False + +# Normalize the targets (subtract empirical mean, divide by empirical stddev) +__C.TRAIN.BBOX_NORMALIZE_TARGETS = True +# Deprecated (inside weights) +__C.TRAIN.BBOX_INSIDE_WEIGHTS = (1.0, 1.0, 1.0, 1.0) +# Normalize the targets using "precomputed" (or made up) means and stdevs +# (BBOX_NORMALIZE_TARGETS must also be True) +__C.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED = False +__C.TRAIN.BBOX_NORMALIZE_MEANS = (0.0, 0.0, 0.0, 0.0) +__C.TRAIN.BBOX_NORMALIZE_STDS = (0.1, 0.1, 0.2, 0.2) + +# Train using these proposals +__C.TRAIN.PROPOSAL_METHOD = 'selective_search' + +# Make minibatches from images that have similar aspect ratios (i.e. both +# tall and thin or both short and wide) in order to avoid wasting computation +# on zero-padding. +__C.TRAIN.ASPECT_GROUPING = True + +# Use RPN to detect objects +__C.TRAIN.HAS_RPN = False +# IOU >= thresh: positive example +__C.TRAIN.RPN_POSITIVE_OVERLAP = 0.7 +# IOU < thresh: negative example +__C.TRAIN.RPN_NEGATIVE_OVERLAP = 0.3 +# If an anchor statisfied by positive and negative conditions set to negative +__C.TRAIN.RPN_CLOBBER_POSITIVES = False +# Max number of foreground examples +__C.TRAIN.RPN_FG_FRACTION = 0.5 +# Total number of examples +__C.TRAIN.RPN_BATCHSIZE = 256 +# NMS threshold used on RPN proposals +__C.TRAIN.RPN_NMS_THRESH = 0.7 +# Number of top scoring boxes to keep before apply NMS to RPN proposals +__C.TRAIN.RPN_PRE_NMS_TOP_N = 12000 +# Number of top scoring boxes to keep after applying NMS to RPN proposals +__C.TRAIN.RPN_POST_NMS_TOP_N = 2000 +# Proposal height and width both need to be greater than RPN_MIN_SIZE (at orig image scale) +__C.TRAIN.RPN_MIN_SIZE = 16 +# Deprecated (outside weights) +__C.TRAIN.RPN_BBOX_INSIDE_WEIGHTS = (1.0, 1.0, 1.0, 1.0) +# Give the positive RPN examples weight of p * 1 / {num positives} +# and give negatives a weight of (1 - p) +# Set to -1.0 to use uniform example weighting +__C.TRAIN.RPN_POSITIVE_WEIGHT = -1.0 + +# whether use class aware box or not +__C.TRAIN.AGONISTIC = False + +# +# Testing options +# + +__C.TEST = edict() + +# Scales to use during testing (can list multiple scales) +# Each scale is the pixel size of an image's shortest side +__C.TEST.SCALES = (600,) + +# Max pixel size of the longest side of a scaled input image +__C.TEST.MAX_SIZE = 1000 + +# Overlap threshold used for non-maximum suppression (suppress boxes with +# IoU >= this threshold) +__C.TEST.NMS = 0.3 + +# Experimental: treat the (K+1) units in the cls_score layer as linear +# predictors (trained, eg, with one-vs-rest SVMs). 
+__C.TEST.SVM = False + +# Test using bounding-box regressors +__C.TEST.BBOX_REG = True + +# Propose boxes +__C.TEST.HAS_RPN = False + +# Test using these proposals +__C.TEST.PROPOSAL_METHOD = 'selective_search' + +## NMS threshold used on RPN proposals +__C.TEST.RPN_NMS_THRESH = 0.7 +## Number of top scoring boxes to keep before apply NMS to RPN proposals +__C.TEST.RPN_PRE_NMS_TOP_N = 6000 +## Number of top scoring boxes to keep after applying NMS to RPN proposals +__C.TEST.RPN_POST_NMS_TOP_N = 300 +# Proposal height and width both need to be greater than RPN_MIN_SIZE (at orig image scale) +__C.TEST.RPN_MIN_SIZE = 16 + +# whether use class aware box or not +__C.TEST.AGONISTIC = False + + +# +# MISC +# + +# The mapping from image coordinates to feature map coordinates might cause +# some boxes that are distinct in image space to become identical in feature +# coordinates. If DEDUP_BOXES > 0, then DEDUP_BOXES is used as the scale factor +# for identifying duplicate boxes. +# 1/16 is correct for {Alex,Caffe}Net, VGG_CNN_M_1024, and VGG16 +__C.DEDUP_BOXES = 1./16. + +# Pixel mean values (BGR order) as a (1, 1, 3) array +# We use the same pixel mean for all networks even though it's not exactly what +# they were trained with +__C.PIXEL_MEANS = np.array([[[102.9801, 115.9465, 122.7717]]]) + +# For reproducibility +__C.RNG_SEED = 3 + +# A small number that's used many times +__C.EPS = 1e-14 + +# Root directory of project +__C.ROOT_DIR = osp.abspath(osp.join(osp.dirname(__file__), '..', '..')) + +# Data directory +__C.DATA_DIR = osp.abspath(osp.join(__C.ROOT_DIR, 'data')) + +# Model directory +__C.MODELS_DIR = osp.abspath(osp.join(__C.ROOT_DIR, 'models', 'pascal_voc')) + +# Name (or path to) the matlab executable +__C.MATLAB = 'matlab' + +# Place outputs under an experiments directory +__C.EXP_DIR = 'default' + +# Use GPU implementation of non-maximum suppression +__C.USE_GPU_NMS = True + +# Default GPU device id +__C.GPU_ID = 0 + + +def get_output_dir(imdb, net=None): + """Return the directory where experimental artifacts are placed. + If the directory does not exist, it is created. + + A canonical path is built using the name from an imdb and a network + (if not None). + """ + outdir = osp.abspath(osp.join(__C.ROOT_DIR, 'output', __C.EXP_DIR, imdb.name)) + if net is not None: + outdir = osp.join(outdir, net.name) + if not os.path.exists(outdir): + os.makedirs(outdir) + return outdir + +def _merge_a_into_b(a, b): + """Merge config dictionary a into config dictionary b, clobbering the + options in b whenever they are also specified in a. + """ + if type(a) is not edict: + return + + for k, v in a.iteritems(): + # a must specify keys that are in b + if not b.has_key(k): + raise KeyError('{} is not a valid config key'.format(k)) + + # the types must match, too + old_type = type(b[k]) + if old_type is not type(v): + if isinstance(b[k], np.ndarray): + v = np.array(v, dtype=b[k].dtype) + else: + raise ValueError(('Type mismatch ({} vs. 
{}) ' + 'for config key: {}').format(type(b[k]), + type(v), k)) + + # recursively merge dicts + if type(v) is edict: + try: + _merge_a_into_b(a[k], b[k]) + except: + print('Error under config key: {}'.format(k)) + raise + else: + b[k] = v + +def cfg_from_file(filename): + """Load a config file and merge it into the default options.""" + import yaml + with open(filename, 'r') as f: + yaml_cfg = edict(yaml.load(f)) + + _merge_a_into_b(yaml_cfg, __C) + +def cfg_from_list(cfg_list): + """Set config keys via list (e.g., from command line).""" + from ast import literal_eval + assert len(cfg_list) % 2 == 0 + for k, v in zip(cfg_list[0::2], cfg_list[1::2]): + key_list = k.split('.') + d = __C + for subkey in key_list[:-1]: + assert d.has_key(subkey) + d = d[subkey] + subkey = key_list[-1] + assert d.has_key(subkey) + try: + value = literal_eval(v) + except: + # handle the case when v is a string literal + value = v + assert type(value) == type(d[subkey]), \ + 'type {} does not match original type {}'.format( + type(value), type(d[subkey])) + d[subkey] = value diff --git a/lib/fast_rcnn/nms_wrapper.py b/lib/fast_rcnn/nms_wrapper.py new file mode 100644 index 0000000..d1a11db --- /dev/null +++ b/lib/fast_rcnn/nms_wrapper.py @@ -0,0 +1,20 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +from fast_rcnn.config import cfg +from nms.gpu_nms import gpu_nms +from nms.cpu_nms import cpu_nms + +def nms(dets, thresh, force_cpu=False): + """Dispatch to either CPU or GPU NMS implementations.""" + + if dets.shape[0] == 0: + return [] + if cfg.USE_GPU_NMS and not force_cpu: + return gpu_nms(dets, thresh, device_id=cfg.GPU_ID) + else: + return cpu_nms(dets, thresh) diff --git a/lib/fast_rcnn/test.py b/lib/fast_rcnn/test.py new file mode 100644 index 0000000..78c24ae --- /dev/null +++ b/lib/fast_rcnn/test.py @@ -0,0 +1,298 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Test a Fast R-CNN network on an imdb (image database).""" + +from fast_rcnn.config import cfg, get_output_dir +from fast_rcnn.bbox_transform import clip_boxes, bbox_transform_inv +import argparse +from utils.timer import Timer +import numpy as np +import cv2 +import caffe +from fast_rcnn.nms_wrapper import nms +import cPickle +from utils.blob import im_list_to_blob +import os + +def _get_image_blob(im): + """Converts an image into a network input. 
+ + Arguments: + im (ndarray): a color image in BGR order + + Returns: + blob (ndarray): a data blob holding an image pyramid + im_scale_factors (list): list of image scales (relative to im) used + in the image pyramid + """ + im_orig = im.astype(np.float32, copy=True) + im_orig -= cfg.PIXEL_MEANS + + im_shape = im_orig.shape + im_size_min = np.min(im_shape[0:2]) + im_size_max = np.max(im_shape[0:2]) + + processed_ims = [] + im_scale_factors = [] + + for target_size in cfg.TEST.SCALES: + im_scale = float(target_size) / float(im_size_min) + # Prevent the biggest axis from being more than MAX_SIZE + if np.round(im_scale * im_size_max) > cfg.TEST.MAX_SIZE: + im_scale = float(cfg.TEST.MAX_SIZE) / float(im_size_max) + im = cv2.resize(im_orig, None, None, fx=im_scale, fy=im_scale, + interpolation=cv2.INTER_LINEAR) + im_scale_factors.append(im_scale) + processed_ims.append(im) + + # Create a blob to hold the input images + blob = im_list_to_blob(processed_ims) + + return blob, np.array(im_scale_factors) + +def _get_rois_blob(im_rois, im_scale_factors): + """Converts RoIs into network inputs. + + Arguments: + im_rois (ndarray): R x 4 matrix of RoIs in original image coordinates + im_scale_factors (list): scale factors as returned by _get_image_blob + + Returns: + blob (ndarray): R x 5 matrix of RoIs in the image pyramid + """ + rois, levels = _project_im_rois(im_rois, im_scale_factors) + rois_blob = np.hstack((levels, rois)) + return rois_blob.astype(np.float32, copy=False) + +def _project_im_rois(im_rois, scales): + """Project image RoIs into the image pyramid built by _get_image_blob. + + Arguments: + im_rois (ndarray): R x 4 matrix of RoIs in original image coordinates + scales (list): scale factors as returned by _get_image_blob + + Returns: + rois (ndarray): R x 4 matrix of projected RoI coordinates + levels (list): image pyramid levels used by each projected RoI + """ + im_rois = im_rois.astype(np.float, copy=False) + + if len(scales) > 1: + widths = im_rois[:, 2] - im_rois[:, 0] + 1 + heights = im_rois[:, 3] - im_rois[:, 1] + 1 + + areas = widths * heights + scaled_areas = areas[:, np.newaxis] * (scales[np.newaxis, :] ** 2) + diff_areas = np.abs(scaled_areas - 224 * 224) + levels = diff_areas.argmin(axis=1)[:, np.newaxis] + else: + levels = np.zeros((im_rois.shape[0], 1), dtype=np.int) + + rois = im_rois * scales[levels] + + return rois, levels + +def _get_blobs(im, rois): + """Convert an image and RoIs within that image into network inputs.""" + blobs = {'data' : None, 'rois' : None} + blobs['data'], im_scale_factors = _get_image_blob(im) + if not cfg.TEST.HAS_RPN: + blobs['rois'] = _get_rois_blob(rois, im_scale_factors) + return blobs, im_scale_factors + +def im_detect(net, im, boxes=None): + """Detect object classes in an image given object proposals. + + Arguments: + net (caffe.Net): Fast R-CNN network to use + im (ndarray): color image to test (in BGR order) + boxes (ndarray): R x 4 array of object proposals or None (for RPN) + + Returns: + scores (ndarray): R x K array of object class scores (K includes + background as object category 0) + boxes (ndarray): R x (4*K) array of predicted bounding boxes + """ + blobs, im_scales = _get_blobs(im, boxes) + + # When mapping from image ROIs to feature map ROIs, there's some aliasing + # (some distinct image ROIs get mapped to the same feature ROI). + # Here, we identify duplicate feature ROIs, so we only compute features + # on the unique subset. 
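+    # Illustrative note (not part of the upstream patch): the hashing trick
+    # below collapses each 5-column ROI row (level, x1, y1, x2, y2) to one
+    # scalar: coordinates are quantized by DEDUP_BOXES (1/16, the feature-map
+    # stride) and dotted with v = [1, 1e3, 1e6, 1e9, 1e12], so boxes that map
+    # to the same feature-map cell collide. Worked example with made-up boxes:
+    #   np.round(np.array([[0, 0, 0, 16, 16],
+    #                      [0, 4, 4, 20, 22]]) * (1. / 16.))
+    #   -> both rows quantize to [0, 0, 0, 1, 1] and hash to the same value,
+    #      so np.unique keeps only one of them; inv_index restores the full
+    #      set of scores/boxes after the forward pass.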
+ if cfg.DEDUP_BOXES > 0 and not cfg.TEST.HAS_RPN: + v = np.array([1, 1e3, 1e6, 1e9, 1e12]) + hashes = np.round(blobs['rois'] * cfg.DEDUP_BOXES).dot(v) + _, index, inv_index = np.unique(hashes, return_index=True, + return_inverse=True) + blobs['rois'] = blobs['rois'][index, :] + boxes = boxes[index, :] + + if cfg.TEST.HAS_RPN: + im_blob = blobs['data'] + blobs['im_info'] = np.array( + [[im_blob.shape[2], im_blob.shape[3], im_scales[0]]], + dtype=np.float32) + + # reshape network inputs + net.blobs['data'].reshape(*(blobs['data'].shape)) + if cfg.TEST.HAS_RPN: + net.blobs['im_info'].reshape(*(blobs['im_info'].shape)) + else: + net.blobs['rois'].reshape(*(blobs['rois'].shape)) + + # do forward + forward_kwargs = {'data': blobs['data'].astype(np.float32, copy=False)} + if cfg.TEST.HAS_RPN: + forward_kwargs['im_info'] = blobs['im_info'].astype(np.float32, copy=False) + else: + forward_kwargs['rois'] = blobs['rois'].astype(np.float32, copy=False) + blobs_out = net.forward(**forward_kwargs) + + if cfg.TEST.HAS_RPN: + assert len(im_scales) == 1, "Only single-image batch implemented" + rois = net.blobs['rois'].data.copy() + # unscale back to raw image space + boxes = rois[:, 1:5] / im_scales[0] + + if cfg.TEST.SVM: + # use the raw scores before softmax under the assumption they + # were trained as linear SVMs + scores = net.blobs['cls_score'].data + else: + # use softmax estimated probabilities + scores = blobs_out['cls_prob'] + + if cfg.TEST.BBOX_REG: + # Apply bounding-box regression deltas + box_deltas = blobs_out['bbox_pred'] + pred_boxes = bbox_transform_inv(boxes, box_deltas) + pred_boxes = clip_boxes(pred_boxes, im.shape) + else: + # Simply repeat the boxes, once for each class + pred_boxes = np.tile(boxes, (1, scores.shape[1])) + + if cfg.DEDUP_BOXES > 0 and not cfg.TEST.HAS_RPN: + # Map scores and predictions back to the original set of boxes + scores = scores[inv_index, :] + pred_boxes = pred_boxes[inv_index, :] + + return scores, pred_boxes + +def vis_detections(im, class_name, dets, thresh=0.3): + """Visual debugging of detections.""" + import matplotlib.pyplot as plt + im = im[:, :, (2, 1, 0)] + for i in xrange(np.minimum(10, dets.shape[0])): + bbox = dets[i, :4] + score = dets[i, -1] + if score > thresh: + plt.cla() + plt.imshow(im) + plt.gca().add_patch( + plt.Rectangle((bbox[0], bbox[1]), + bbox[2] - bbox[0], + bbox[3] - bbox[1], fill=False, + edgecolor='g', linewidth=3) + ) + plt.title('{} {:.3f}'.format(class_name, score)) + plt.show() + +def apply_nms(all_boxes, thresh): + """Apply non-maximum suppression to all predicted boxes output by the + test_net method. 
+ """ + num_classes = len(all_boxes) + num_images = len(all_boxes[0]) + nms_boxes = [[[] for _ in xrange(num_images)] + for _ in xrange(num_classes)] + for cls_ind in xrange(num_classes): + for im_ind in xrange(num_images): + dets = all_boxes[cls_ind][im_ind] + if dets == []: + continue + # CPU NMS is much faster than GPU NMS when the number of boxes + # is relative small (e.g., < 10k) + # TODO(rbg): autotune NMS dispatch + keep = nms(dets, thresh, force_cpu=True) + if len(keep) == 0: + continue + nms_boxes[cls_ind][im_ind] = dets[keep, :].copy() + return nms_boxes + +def test_net(net, imdb, max_per_image=100, thresh=0.05, vis=False): + """Test a Fast R-CNN network on an image database.""" + num_images = len(imdb.image_index) + # all detections are collected into: + # all_boxes[cls][image] = N x 5 array of detections in + # (x1, y1, x2, y2, score) + all_boxes = [[[] for _ in xrange(num_images)] + for _ in xrange(imdb.num_classes)] + + output_dir = get_output_dir(imdb, net) + + # timers + _t = {'im_detect' : Timer(), 'misc' : Timer()} + + if not cfg.TEST.HAS_RPN: + roidb = imdb.roidb + + for i in xrange(num_images): + # filter out any ground truth boxes + if cfg.TEST.HAS_RPN: + box_proposals = None + else: + # The roidb may contain ground-truth rois (for example, if the roidb + # comes from the training or val split). We only want to evaluate + # detection on the *non*-ground-truth rois. We select those the rois + # that have the gt_classes field set to 0, which means there's no + # ground truth. + box_proposals = roidb[i]['boxes'][roidb[i]['gt_classes'] == 0] + + im = cv2.imread(imdb.image_path_at(i)) + _t['im_detect'].tic() + scores, boxes = im_detect(net, im, box_proposals) + _t['im_detect'].toc() + + _t['misc'].tic() + # skip j = 0, because it's the background class + for j in xrange(1, imdb.num_classes): + inds = np.where(scores[:, j] > thresh)[0] + cls_scores = scores[inds, j] + if cfg.TEST.AGONISTIC: + cls_boxes = boxes[inds, 4:8] + else: + cls_boxes = boxes[inds, j*4:(j+1)*4] + cls_dets = np.hstack((cls_boxes, cls_scores[:, np.newaxis])) \ + .astype(np.float32, copy=False) + keep = nms(cls_dets, cfg.TEST.NMS) + cls_dets = cls_dets[keep, :] + if vis: + vis_detections(im, imdb.classes[j], cls_dets) + all_boxes[j][i] = cls_dets + + # Limit to max_per_image detections *over all classes* + if max_per_image > 0: + image_scores = np.hstack([all_boxes[j][i][:, -1] + for j in xrange(1, imdb.num_classes)]) + if len(image_scores) > max_per_image: + image_thresh = np.sort(image_scores)[-max_per_image] + for j in xrange(1, imdb.num_classes): + keep = np.where(all_boxes[j][i][:, -1] >= image_thresh)[0] + all_boxes[j][i] = all_boxes[j][i][keep, :] + _t['misc'].toc() + + print 'im_detect: {:d}/{:d} {:.3f}s {:.3f}s' \ + .format(i + 1, num_images, _t['im_detect'].average_time, + _t['misc'].average_time) + + det_file = os.path.join(output_dir, 'detections.pkl') + with open(det_file, 'wb') as f: + cPickle.dump(all_boxes, f, cPickle.HIGHEST_PROTOCOL) + + print 'Evaluating detections' + imdb.evaluate_detections(all_boxes, output_dir) diff --git a/lib/fast_rcnn/train.py b/lib/fast_rcnn/train.py new file mode 100644 index 0000000..6ca5437 --- /dev/null +++ b/lib/fast_rcnn/train.py @@ -0,0 +1,186 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Train a Fast R-CNN network.""" + +import caffe +from 
fast_rcnn.config import cfg +import roi_data_layer.roidb as rdl_roidb +from utils.timer import Timer +import numpy as np +import os + +from caffe.proto import caffe_pb2 +import google.protobuf as pb2 + +class SolverWrapper(object): + """A simple wrapper around Caffe's solver. + This wrapper gives us control over he snapshotting process, which we + use to unnormalize the learned bounding-box regression weights. + """ + + def __init__(self, solver_prototxt, roidb, output_dir, + pretrained_model=None): + """Initialize the SolverWrapper.""" + self.output_dir = output_dir + + if (cfg.TRAIN.HAS_RPN and cfg.TRAIN.BBOX_REG and + cfg.TRAIN.BBOX_NORMALIZE_TARGETS): + # RPN can only use precomputed normalization because there are no + # fixed statistics to compute a priori + assert cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED + + if cfg.TRAIN.BBOX_REG: + print 'Computing bounding-box regression targets...' + self.bbox_means, self.bbox_stds = \ + rdl_roidb.add_bbox_regression_targets(roidb) + print 'done' + + self.solver = caffe.SGDSolver(solver_prototxt) + if pretrained_model is not None: + print ('Loading pretrained model ' + 'weights from {:s}').format(pretrained_model) + self.solver.net.copy_from(pretrained_model) + + self.solver_param = caffe_pb2.SolverParameter() + with open(solver_prototxt, 'rt') as f: + pb2.text_format.Merge(f.read(), self.solver_param) + + self.solver.net.layers[0].set_roidb(roidb) + + def snapshot(self): + """Take a snapshot of the network after unnormalizing the learned + bounding-box regression weights. This enables easy use at test-time. + """ + net = self.solver.net + + scale_bbox_params_faster_rcnn = (cfg.TRAIN.BBOX_REG and + cfg.TRAIN.BBOX_NORMALIZE_TARGETS and + net.params.has_key('bbox_pred')) + + scale_bbox_params_rfcn = (cfg.TRAIN.BBOX_REG and + cfg.TRAIN.BBOX_NORMALIZE_TARGETS and + net.params.has_key('rfcn_bbox')) + + if scale_bbox_params_faster_rcnn: + # save original values + orig_0 = net.params['bbox_pred'][0].data.copy() + orig_1 = net.params['bbox_pred'][1].data.copy() + + # scale and shift with bbox reg unnormalization; then save snapshot + net.params['bbox_pred'][0].data[...] = \ + (net.params['bbox_pred'][0].data * + self.bbox_stds[:, np.newaxis]) + net.params['bbox_pred'][1].data[...] = \ + (net.params['bbox_pred'][1].data * + self.bbox_stds + self.bbox_means) + + + if scale_bbox_params_rfcn: + # save original values + orig_0 = net.params['rfcn_bbox'][0].data.copy() + orig_1 = net.params['rfcn_bbox'][1].data.copy() + repeat = orig_1.shape[0] / self.bbox_means.shape[0] + + + # scale and shift with bbox reg unnormalization; then save snapshot + net.params['rfcn_bbox'][0].data[...] = \ + (net.params['rfcn_bbox'][0].data * + np.repeat(self.bbox_stds, repeat).reshape((orig_1.shape[0], 1, 1, 1))) + net.params['rfcn_bbox'][1].data[...] = \ + (net.params['rfcn_bbox'][1].data * + np.repeat(self.bbox_stds, repeat) + np.repeat(self.bbox_means, repeat)) + + infix = ('_' + cfg.TRAIN.SNAPSHOT_INFIX + if cfg.TRAIN.SNAPSHOT_INFIX != '' else '') + filename = (self.solver_param.snapshot_prefix + infix + + '_iter_{:d}'.format(self.solver.iter) + '.caffemodel') + filename = os.path.join(self.output_dir, filename) + net.save(str(filename)) + print 'Wrote snapshot to: {:s}'.format(filename) + + if scale_bbox_params_faster_rcnn: + # restore net to original state + net.params['bbox_pred'][0].data[...] = orig_0 + net.params['bbox_pred'][1].data[...] = orig_1 + if scale_bbox_params_rfcn: + # restore net to original state + net.params['rfcn_bbox'][0].data[...] 
= orig_0 + net.params['rfcn_bbox'][1].data[...] = orig_1 + + return filename + + def train_model(self, max_iters): + """Network training loop.""" + last_snapshot_iter = -1 + timer = Timer() + model_paths = [] + while self.solver.iter < max_iters: + # Make one SGD update + timer.tic() + self.solver.step(1) + timer.toc() + if self.solver.iter % (10 * self.solver_param.display) == 0: + print 'speed: {:.3f}s / iter'.format(timer.average_time) + + if self.solver.iter % cfg.TRAIN.SNAPSHOT_ITERS == 0: + last_snapshot_iter = self.solver.iter + model_paths.append(self.snapshot()) + + if last_snapshot_iter != self.solver.iter: + model_paths.append(self.snapshot()) + return model_paths + +def get_training_roidb(imdb): + """Returns a roidb (Region of Interest database) for use in training.""" + if cfg.TRAIN.USE_FLIPPED: + print 'Appending horizontally-flipped training examples...' + imdb.append_flipped_images() + print 'done' + + print 'Preparing training data...' + rdl_roidb.prepare_roidb(imdb) + print 'done' + + return imdb.roidb + +def filter_roidb(roidb): + """Remove roidb entries that have no usable RoIs.""" + + def is_valid(entry): + # Valid images have: + # (1) At least one foreground RoI OR + # (2) At least one background RoI + overlaps = entry['max_overlaps'] + # find boxes with sufficient overlap + fg_inds = np.where(overlaps >= cfg.TRAIN.FG_THRESH)[0] + # Select background RoIs as those within [BG_THRESH_LO, BG_THRESH_HI) + bg_inds = np.where((overlaps < cfg.TRAIN.BG_THRESH_HI) & + (overlaps >= cfg.TRAIN.BG_THRESH_LO))[0] + # image is only valid if such boxes exist + valid = len(fg_inds) > 0 or len(bg_inds) > 0 + return valid + + num = len(roidb) + filtered_roidb = [entry for entry in roidb if is_valid(entry)] + num_after = len(filtered_roidb) + print 'Filtered {} roidb entries: {} -> {}'.format(num - num_after, + num, num_after) + return filtered_roidb + +def train_net(solver_prototxt, roidb, output_dir, + pretrained_model=None, max_iters=40000): + """Train a Fast R-CNN network.""" + + roidb = filter_roidb(roidb) + sw = SolverWrapper(solver_prototxt, roidb, output_dir, + pretrained_model=pretrained_model) + + print 'Solving...' 
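+    # For reference, a minimal call sequence that reaches this point looks like
+    # the sketch below (not part of the upstream patch; get_imdb and the file
+    # paths are assumptions, everything else is defined in this repository):
+    #
+    #   from datasets.factory import get_imdb      # assumed factory helper
+    #   imdb = get_imdb('voc_2007_trainval')
+    #   roidb = get_training_roidb(imdb)
+    #   train_net('path/to/solver.prototxt', roidb,
+    #             get_output_dir(imdb),
+    #             pretrained_model='path/to/pretrained.caffemodel',
+    #             max_iters=40000)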
+ model_paths = sw.train_model(max_iters) + print 'done solving' + return model_paths diff --git a/lib/nms/.gitignore b/lib/nms/.gitignore new file mode 100644 index 0000000..15a165d --- /dev/null +++ b/lib/nms/.gitignore @@ -0,0 +1,3 @@ +*.c +*.cpp +*.so diff --git a/lib/nms/__init__.py b/lib/nms/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lib/nms/cpu_nms.pyx b/lib/nms/cpu_nms.pyx new file mode 100644 index 0000000..1d0bef3 --- /dev/null +++ b/lib/nms/cpu_nms.pyx @@ -0,0 +1,68 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import numpy as np +cimport numpy as np + +cdef inline np.float32_t max(np.float32_t a, np.float32_t b): + return a if a >= b else b + +cdef inline np.float32_t min(np.float32_t a, np.float32_t b): + return a if a <= b else b + +def cpu_nms(np.ndarray[np.float32_t, ndim=2] dets, np.float thresh): + cdef np.ndarray[np.float32_t, ndim=1] x1 = dets[:, 0] + cdef np.ndarray[np.float32_t, ndim=1] y1 = dets[:, 1] + cdef np.ndarray[np.float32_t, ndim=1] x2 = dets[:, 2] + cdef np.ndarray[np.float32_t, ndim=1] y2 = dets[:, 3] + cdef np.ndarray[np.float32_t, ndim=1] scores = dets[:, 4] + + cdef np.ndarray[np.float32_t, ndim=1] areas = (x2 - x1 + 1) * (y2 - y1 + 1) + cdef np.ndarray[np.int_t, ndim=1] order = scores.argsort()[::-1] + + cdef int ndets = dets.shape[0] + cdef np.ndarray[np.int_t, ndim=1] suppressed = \ + np.zeros((ndets), dtype=np.int) + + # nominal indices + cdef int _i, _j + # sorted indices + cdef int i, j + # temp variables for box i's (the box currently under consideration) + cdef np.float32_t ix1, iy1, ix2, iy2, iarea + # variables for computing overlap with box j (lower scoring box) + cdef np.float32_t xx1, yy1, xx2, yy2 + cdef np.float32_t w, h + cdef np.float32_t inter, ovr + + keep = [] + for _i in range(ndets): + i = order[_i] + if suppressed[i] == 1: + continue + keep.append(i) + ix1 = x1[i] + iy1 = y1[i] + ix2 = x2[i] + iy2 = y2[i] + iarea = areas[i] + for _j in range(_i + 1, ndets): + j = order[_j] + if suppressed[j] == 1: + continue + xx1 = max(ix1, x1[j]) + yy1 = max(iy1, y1[j]) + xx2 = min(ix2, x2[j]) + yy2 = min(iy2, y2[j]) + w = max(0.0, xx2 - xx1 + 1) + h = max(0.0, yy2 - yy1 + 1) + inter = w * h + ovr = inter / (iarea + areas[j] - inter) + if ovr >= thresh: + suppressed[j] = 1 + + return keep diff --git a/lib/nms/gpu_nms.hpp b/lib/nms/gpu_nms.hpp new file mode 100644 index 0000000..68b6d42 --- /dev/null +++ b/lib/nms/gpu_nms.hpp @@ -0,0 +1,2 @@ +void _nms(int* keep_out, int* num_out, const float* boxes_host, int boxes_num, + int boxes_dim, float nms_overlap_thresh, int device_id); diff --git a/lib/nms/gpu_nms.pyx b/lib/nms/gpu_nms.pyx new file mode 100644 index 0000000..59d84af --- /dev/null +++ b/lib/nms/gpu_nms.pyx @@ -0,0 +1,31 @@ +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import numpy as np +cimport numpy as np + +assert sizeof(int) == sizeof(np.int32_t) + +cdef extern from "gpu_nms.hpp": + void _nms(np.int32_t*, int*, np.float32_t*, int, int, float, int) + +def gpu_nms(np.ndarray[np.float32_t, ndim=2] dets, np.float thresh, + np.int32_t device_id=0): + cdef int boxes_num = dets.shape[0] + cdef int 
boxes_dim = dets.shape[1] + cdef int num_out + cdef np.ndarray[np.int32_t, ndim=1] \ + keep = np.zeros(boxes_num, dtype=np.int32) + cdef np.ndarray[np.float32_t, ndim=1] \ + scores = dets[:, 4] + cdef np.ndarray[np.int_t, ndim=1] \ + order = scores.argsort()[::-1] + cdef np.ndarray[np.float32_t, ndim=2] \ + sorted_dets = dets[order, :] + _nms(&keep[0], &num_out, &sorted_dets[0, 0], boxes_num, boxes_dim, thresh, device_id) + keep = keep[:num_out] + return list(order[keep]) diff --git a/lib/nms/nms_kernel.cu b/lib/nms/nms_kernel.cu new file mode 100644 index 0000000..038a590 --- /dev/null +++ b/lib/nms/nms_kernel.cu @@ -0,0 +1,144 @@ +// ------------------------------------------------------------------ +// Faster R-CNN +// Copyright (c) 2015 Microsoft +// Licensed under The MIT License [see fast-rcnn/LICENSE for details] +// Written by Shaoqing Ren +// ------------------------------------------------------------------ + +#include "gpu_nms.hpp" +#include +#include + +#define CUDA_CHECK(condition) \ + /* Code block avoids redefinition of cudaError_t error */ \ + do { \ + cudaError_t error = condition; \ + if (error != cudaSuccess) { \ + std::cout << cudaGetErrorString(error) << std::endl; \ + } \ + } while (0) + +#define DIVUP(m,n) ((m) / (n) + ((m) % (n) > 0)) +int const threadsPerBlock = sizeof(unsigned long long) * 8; + +__device__ inline float devIoU(float const * const a, float const * const b) { + float left = max(a[0], b[0]), right = min(a[2], b[2]); + float top = max(a[1], b[1]), bottom = min(a[3], b[3]); + float width = max(right - left + 1, 0.f), height = max(bottom - top + 1, 0.f); + float interS = width * height; + float Sa = (a[2] - a[0] + 1) * (a[3] - a[1] + 1); + float Sb = (b[2] - b[0] + 1) * (b[3] - b[1] + 1); + return interS / (Sa + Sb - interS); +} + +__global__ void nms_kernel(const int n_boxes, const float nms_overlap_thresh, + const float *dev_boxes, unsigned long long *dev_mask) { + const int row_start = blockIdx.y; + const int col_start = blockIdx.x; + + // if (row_start > col_start) return; + + const int row_size = + min(n_boxes - row_start * threadsPerBlock, threadsPerBlock); + const int col_size = + min(n_boxes - col_start * threadsPerBlock, threadsPerBlock); + + __shared__ float block_boxes[threadsPerBlock * 5]; + if (threadIdx.x < col_size) { + block_boxes[threadIdx.x * 5 + 0] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 0]; + block_boxes[threadIdx.x * 5 + 1] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 1]; + block_boxes[threadIdx.x * 5 + 2] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 2]; + block_boxes[threadIdx.x * 5 + 3] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 3]; + block_boxes[threadIdx.x * 5 + 4] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 4]; + } + __syncthreads(); + + if (threadIdx.x < row_size) { + const int cur_box_idx = threadsPerBlock * row_start + threadIdx.x; + const float *cur_box = dev_boxes + cur_box_idx * 5; + int i = 0; + unsigned long long t = 0; + int start = 0; + if (row_start == col_start) { + start = threadIdx.x + 1; + } + for (i = start; i < col_size; i++) { + if (devIoU(cur_box, block_boxes + i * 5) > nms_overlap_thresh) { + t |= 1ULL << i; + } + } + const int col_blocks = DIVUP(n_boxes, threadsPerBlock); + dev_mask[cur_box_idx * col_blocks + col_start] = t; + } +} + +void _set_device(int device_id) { + int current_device; + CUDA_CHECK(cudaGetDevice(¤t_device)); + if (current_device == device_id) { + return; + } + // The call to 
cudaSetDevice must come before any calls to Get, which + // may perform initialization using the GPU. + CUDA_CHECK(cudaSetDevice(device_id)); +} + +void _nms(int* keep_out, int* num_out, const float* boxes_host, int boxes_num, + int boxes_dim, float nms_overlap_thresh, int device_id) { + _set_device(device_id); + + float* boxes_dev = NULL; + unsigned long long* mask_dev = NULL; + + const int col_blocks = DIVUP(boxes_num, threadsPerBlock); + + CUDA_CHECK(cudaMalloc(&boxes_dev, + boxes_num * boxes_dim * sizeof(float))); + CUDA_CHECK(cudaMemcpy(boxes_dev, + boxes_host, + boxes_num * boxes_dim * sizeof(float), + cudaMemcpyHostToDevice)); + + CUDA_CHECK(cudaMalloc(&mask_dev, + boxes_num * col_blocks * sizeof(unsigned long long))); + + dim3 blocks(DIVUP(boxes_num, threadsPerBlock), + DIVUP(boxes_num, threadsPerBlock)); + dim3 threads(threadsPerBlock); + nms_kernel<<>>(boxes_num, + nms_overlap_thresh, + boxes_dev, + mask_dev); + + std::vector mask_host(boxes_num * col_blocks); + CUDA_CHECK(cudaMemcpy(&mask_host[0], + mask_dev, + sizeof(unsigned long long) * boxes_num * col_blocks, + cudaMemcpyDeviceToHost)); + + std::vector remv(col_blocks); + memset(&remv[0], 0, sizeof(unsigned long long) * col_blocks); + + int num_to_keep = 0; + for (int i = 0; i < boxes_num; i++) { + int nblock = i / threadsPerBlock; + int inblock = i % threadsPerBlock; + + if (!(remv[nblock] & (1ULL << inblock))) { + keep_out[num_to_keep++] = i; + unsigned long long *p = &mask_host[0] + i * col_blocks; + for (int j = nblock; j < col_blocks; j++) { + remv[j] |= p[j]; + } + } + } + *num_out = num_to_keep; + + CUDA_CHECK(cudaFree(boxes_dev)); + CUDA_CHECK(cudaFree(mask_dev)); +} diff --git a/lib/nms/py_cpu_nms.py b/lib/nms/py_cpu_nms.py new file mode 100644 index 0000000..54e7b25 --- /dev/null +++ b/lib/nms/py_cpu_nms.py @@ -0,0 +1,38 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import numpy as np + +def py_cpu_nms(dets, thresh): + """Pure Python NMS baseline.""" + x1 = dets[:, 0] + y1 = dets[:, 1] + x2 = dets[:, 2] + y2 = dets[:, 3] + scores = dets[:, 4] + + areas = (x2 - x1 + 1) * (y2 - y1 + 1) + order = scores.argsort()[::-1] + + keep = [] + while order.size > 0: + i = order[0] + keep.append(i) + xx1 = np.maximum(x1[i], x1[order[1:]]) + yy1 = np.maximum(y1[i], y1[order[1:]]) + xx2 = np.minimum(x2[i], x2[order[1:]]) + yy2 = np.minimum(y2[i], y2[order[1:]]) + + w = np.maximum(0.0, xx2 - xx1 + 1) + h = np.maximum(0.0, yy2 - yy1 + 1) + inter = w * h + ovr = inter / (areas[i] + areas[order[1:]] - inter) + + inds = np.where(ovr <= thresh)[0] + order = order[inds + 1] + + return keep diff --git a/lib/pycocotools/UPSTREAM_REV b/lib/pycocotools/UPSTREAM_REV new file mode 100644 index 0000000..706219b --- /dev/null +++ b/lib/pycocotools/UPSTREAM_REV @@ -0,0 +1 @@ +https://github.com/pdollar/coco/commit/3ac47c77ebd5a1ed4254a98b7fbf2ef4765a3574 diff --git a/lib/pycocotools/__init__.py b/lib/pycocotools/__init__.py new file mode 100644 index 0000000..3f7d85b --- /dev/null +++ b/lib/pycocotools/__init__.py @@ -0,0 +1 @@ +__author__ = 'tylin' diff --git a/lib/pycocotools/_mask.pyx b/lib/pycocotools/_mask.pyx new file mode 100644 index 0000000..e08f1f4 --- /dev/null +++ b/lib/pycocotools/_mask.pyx @@ -0,0 +1,291 @@ +# distutils: language = c +# distutils: sources = ../MatlabAPI/private/maskApi.c + 
+#************************************************************************** +# Microsoft COCO Toolbox. version 2.0 +# Data, paper, and tutorials available at: http://mscoco.org/ +# Code written by Piotr Dollar and Tsung-Yi Lin, 2015. +# Licensed under the Simplified BSD License [see coco/license.txt] +#************************************************************************** + +__author__ = 'tsungyi' + +# import both Python-level and C-level symbols of Numpy +# the API uses Numpy to interface C and Python +import numpy as np +cimport numpy as np +from libc.stdlib cimport malloc, free + +# intialized Numpy. must do. +np.import_array() + +# import numpy C function +# we use PyArray_ENABLEFLAGS to make Numpy ndarray responsible to memoery management +cdef extern from "numpy/arrayobject.h": + void PyArray_ENABLEFLAGS(np.ndarray arr, int flags) + +# Declare the prototype of the C functions in MaskApi.h +cdef extern from "maskApi.h": + ctypedef unsigned int uint + ctypedef unsigned long siz + ctypedef unsigned char byte + ctypedef double* BB + ctypedef struct RLE: + siz h, + siz w, + siz m, + uint* cnts, + void rlesInit( RLE **R, siz n ) + void rleEncode( RLE *R, const byte *M, siz h, siz w, siz n ) + void rleDecode( const RLE *R, byte *mask, siz n ) + void rleMerge( const RLE *R, RLE *M, siz n, bint intersect ) + void rleArea( const RLE *R, siz n, uint *a ) + void rleIou( RLE *dt, RLE *gt, siz m, siz n, byte *iscrowd, double *o ) + void bbIou( BB dt, BB gt, siz m, siz n, byte *iscrowd, double *o ) + void rleToBbox( const RLE *R, BB bb, siz n ) + void rleFrBbox( RLE *R, const BB bb, siz h, siz w, siz n ) + void rleFrPoly( RLE *R, const double *xy, siz k, siz h, siz w ) + char* rleToString( const RLE *R ) + void rleFrString( RLE *R, char *s, siz h, siz w ) + +# python class to wrap RLE array in C +# the class handles the memory allocation and deallocation +cdef class RLEs: + cdef RLE *_R + cdef siz _n + + def __cinit__(self, siz n =0): + rlesInit(&self._R, n) + self._n = n + + # free the RLE array here + def __dealloc__(self): + if self._R is not NULL: + for i in range(self._n): + free(self._R[i].cnts) + free(self._R) + def __getattr__(self, key): + if key == 'n': + return self._n + raise AttributeError(key) + +# python class to wrap Mask array in C +# the class handles the memory allocation and deallocation +cdef class Masks: + cdef byte *_mask + cdef siz _h + cdef siz _w + cdef siz _n + + def __cinit__(self, h, w, n): + self._mask = malloc(h*w*n* sizeof(byte)) + self._h = h + self._w = w + self._n = n + # def __dealloc__(self): + # the memory management of _mask has been passed to np.ndarray + # it doesn't need to be freed here + + # called when passing into np.array() and return an np.ndarray in column-major order + def __array__(self): + cdef np.npy_intp shape[1] + shape[0] = self._h*self._w*self._n + # Create a 1D array, and reshape it to fortran/Matlab column-major array + ndarray = np.PyArray_SimpleNewFromData(1, shape, np.NPY_UINT8, self._mask).reshape((self._h, self._w, self._n), order='F') + # The _mask allocated by Masks is now handled by ndarray + PyArray_ENABLEFLAGS(ndarray, np.NPY_OWNDATA) + return ndarray + +# internal conversion from Python RLEs object to compressed RLE format +def _toString(RLEs Rs): + cdef siz n = Rs.n + cdef bytes py_string + cdef char* c_string + objs = [] + for i in range(n): + c_string = rleToString( &Rs._R[i] ) + py_string = c_string + objs.append({ + 'size': [Rs._R[i].h, Rs._R[i].w], + 'counts': py_string + }) + free(c_string) + return objs + +# internal 
conversion from compressed RLE format to Python RLEs object +def _frString(rleObjs): + cdef siz n = len(rleObjs) + Rs = RLEs(n) + cdef bytes py_string + cdef char* c_string + for i, obj in enumerate(rleObjs): + py_string = str(obj['counts']) + c_string = py_string + rleFrString( &Rs._R[i], c_string, obj['size'][0], obj['size'][1] ) + return Rs + +# encode mask to RLEs objects +# list of RLE string can be generated by RLEs member function +def encode(np.ndarray[np.uint8_t, ndim=3, mode='fortran'] mask): + h, w, n = mask.shape[0], mask.shape[1], mask.shape[2] + cdef RLEs Rs = RLEs(n) + rleEncode(Rs._R,mask.data,h,w,n) + objs = _toString(Rs) + return objs + +# decode mask from compressed list of RLE string or RLEs object +def decode(rleObjs): + cdef RLEs Rs = _frString(rleObjs) + h, w, n = Rs._R[0].h, Rs._R[0].w, Rs._n + masks = Masks(h, w, n) + rleDecode( Rs._R, masks._mask, n ); + return np.array(masks) + +def merge(rleObjs, bint intersect=0): + cdef RLEs Rs = _frString(rleObjs) + cdef RLEs R = RLEs(1) + rleMerge(Rs._R, R._R, Rs._n, intersect) + obj = _toString(R)[0] + return obj + +def area(rleObjs): + cdef RLEs Rs = _frString(rleObjs) + cdef uint* _a = malloc(Rs._n* sizeof(uint)) + rleArea(Rs._R, Rs._n, _a) + cdef np.npy_intp shape[1] + shape[0] = Rs._n + a = np.array((Rs._n, ), dtype=np.uint8) + a = np.PyArray_SimpleNewFromData(1, shape, np.NPY_UINT32, _a) + PyArray_ENABLEFLAGS(a, np.NPY_OWNDATA) + return a + +# iou computation. support function overload (RLEs-RLEs and bbox-bbox). +def iou( dt, gt, pyiscrowd ): + def _preproc(objs): + if len(objs) == 0: + return objs + if type(objs) == np.ndarray: + if len(objs.shape) == 1: + objs = objs.reshape((objs[0], 1)) + # check if it's Nx4 bbox + if not len(objs.shape) == 2 or not objs.shape[1] == 4: + raise Exception('numpy ndarray input is only for *bounding boxes* and should have Nx4 dimension') + objs = objs.astype(np.double) + elif type(objs) == list: + # check if list is in box format and convert it to np.ndarray + isbox = np.all(np.array([(len(obj)==4) and ((type(obj)==list) or (type(obj)==np.ndarray)) for obj in objs])) + isrle = np.all(np.array([type(obj) == dict for obj in objs])) + if isbox: + objs = np.array(objs, dtype=np.double) + if len(objs.shape) == 1: + objs = objs.reshape((1,objs.shape[0])) + elif isrle: + objs = _frString(objs) + else: + raise Exception('list input can be bounding box (Nx4) or RLEs ([RLE])') + else: + raise Exception('unrecognized type. 
The following type: RLEs (rle), np.ndarray (box), and list (box) are supported.') + return objs + def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou): + rleIou( dt._R, gt._R, m, n, iscrowd.data, _iou.data ) + def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou): + bbIou( dt.data, gt.data, m, n, iscrowd.data, _iou.data ) + def _len(obj): + cdef siz N = 0 + if type(obj) == RLEs: + N = obj.n + elif len(obj)==0: + pass + elif type(obj) == np.ndarray: + N = obj.shape[0] + return N + # convert iscrowd to numpy array + cdef np.ndarray[np.uint8_t, ndim=1] iscrowd = np.array(pyiscrowd, dtype=np.uint8) + # simple type checking + cdef siz m, n + dt = _preproc(dt) + gt = _preproc(gt) + m = _len(dt) + n = _len(gt) + if m == 0 or n == 0: + return [] + if not type(dt) == type(gt): + raise Exception('The dt and gt should have the same data type, either RLEs, list or np.ndarray') + + # define local variables + cdef double* _iou = 0 + cdef np.npy_intp shape[1] + # check type and assign iou function + if type(dt) == RLEs: + _iouFun = _rleIou + elif type(dt) == np.ndarray: + _iouFun = _bbIou + else: + raise Exception('input data type not allowed.') + _iou = malloc(m*n* sizeof(double)) + iou = np.zeros((m*n, ), dtype=np.double) + shape[0] = m*n + iou = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _iou) + PyArray_ENABLEFLAGS(iou, np.NPY_OWNDATA) + _iouFun(dt, gt, iscrowd, m, n, iou) + return iou.reshape((m,n), order='F') + +def toBbox( rleObjs ): + cdef RLEs Rs = _frString(rleObjs) + cdef siz n = Rs.n + cdef BB _bb = malloc(4*n* sizeof(double)) + rleToBbox( Rs._R, _bb, n ) + cdef np.npy_intp shape[1] + shape[0] = 4*n + bb = np.array((1,4*n), dtype=np.double) + bb = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _bb).reshape((n, 4)) + PyArray_ENABLEFLAGS(bb, np.NPY_OWNDATA) + return bb + +def frBbox(np.ndarray[np.double_t, ndim=2] bb, siz h, siz w ): + cdef siz n = bb.shape[0] + Rs = RLEs(n) + rleFrBbox( Rs._R, bb.data, h, w, n ) + objs = _toString(Rs) + return objs + +def frPoly( poly, siz h, siz w ): + cdef np.ndarray[np.double_t, ndim=1] np_poly + n = len(poly) + Rs = RLEs(n) + for i, p in enumerate(poly): + np_poly = np.array(p, dtype=np.double, order='F') + rleFrPoly( &Rs._R[i], np_poly.data, len(np_poly)/2, h, w ) + objs = _toString(Rs) + return objs + +def frUncompressedRLE(ucRles, siz h, siz w): + cdef np.ndarray[np.uint32_t, ndim=1] cnts + cdef RLE R + cdef uint *data + n = len(ucRles) + objs = [] + for i in range(n): + Rs = RLEs(1) + cnts = np.array(ucRles[i]['counts'], dtype=np.uint32) + # time for malloc can be saved here but it's fine + data = malloc(len(cnts)* sizeof(uint)) + for j in range(len(cnts)): + data[j] = cnts[j] + R = RLE(ucRles[i]['size'][0], ucRles[i]['size'][1], len(cnts), data) + Rs._R[0] = R + objs.append(_toString(Rs)[0]) + return objs + +def frPyObjects(pyobj, siz h, w): + if type(pyobj) == np.ndarray: + objs = frBbox(pyobj, h, w ) + elif type(pyobj) == list and len(pyobj[0]) == 4: + objs = frBbox(pyobj, h, w ) + elif type(pyobj) == list and len(pyobj[0]) > 4: + objs = frPoly(pyobj, h, w ) + elif type(pyobj) == list and type(pyobj[0]) == dict: + objs = frUncompressedRLE(pyobj, h, w) + else: + raise Exception('input type is not supported.') + return objs diff --git a/lib/pycocotools/coco.py b/lib/pycocotools/coco.py new file mode 100644 index 0000000..5d9f6b8 --- /dev/null +++ 
b/lib/pycocotools/coco.py @@ -0,0 +1,351 @@ +__author__ = 'tylin' +__version__ = '1.0.1' +# Interface for accessing the Microsoft COCO dataset. + +# Microsoft COCO is a large image dataset designed for object detection, +# segmentation, and caption generation. pycocotools is a Python API that +# assists in loading, parsing and visualizing the annotations in COCO. +# Please visit http://mscoco.org/ for more information on COCO, including +# for the data, paper, and tutorials. The exact format of the annotations +# is also described on the COCO website. For example usage of the pycocotools +# please see pycocotools_demo.ipynb. In addition to this API, please download both +# the COCO images and annotations in order to run the demo. + +# An alternative to using the API is to load the annotations directly +# into Python dictionary +# Using the API provides additional utility functions. Note that this API +# supports both *instance* and *caption* annotations. In the case of +# captions not all functions are defined (e.g. categories are undefined). + +# The following API functions are defined: +# COCO - COCO api class that loads COCO annotation file and prepare data structures. +# decodeMask - Decode binary mask M encoded via run-length encoding. +# encodeMask - Encode binary mask M using run-length encoding. +# getAnnIds - Get ann ids that satisfy given filter conditions. +# getCatIds - Get cat ids that satisfy given filter conditions. +# getImgIds - Get img ids that satisfy given filter conditions. +# loadAnns - Load anns with the specified ids. +# loadCats - Load cats with the specified ids. +# loadImgs - Load imgs with the specified ids. +# segToMask - Convert polygon segmentation to binary mask. +# showAnns - Display the specified annotations. +# loadRes - Load algorithm results and create API for accessing them. +# download - Download COCO images from mscoco.org server. +# Throughout the API "ann"=annotation, "cat"=category, and "img"=image. +# Help on each functions can be accessed by: "help COCO>function". + +# See also COCO>decodeMask, +# COCO>encodeMask, COCO>getAnnIds, COCO>getCatIds, +# COCO>getImgIds, COCO>loadAnns, COCO>loadCats, +# COCO>loadImgs, COCO>segToMask, COCO>showAnns + +# Microsoft COCO Toolbox. version 2.0 +# Data, paper, and tutorials available at: http://mscoco.org/ +# Code written by Piotr Dollar and Tsung-Yi Lin, 2014. +# Licensed under the Simplified BSD License [see bsd.txt] + +import json +import datetime +import time +import matplotlib.pyplot as plt +from matplotlib.collections import PatchCollection +from matplotlib.patches import Polygon +import numpy as np +from skimage.draw import polygon +import urllib +import copy +import itertools +import mask +import os + +class COCO: + def __init__(self, annotation_file=None): + """ + Constructor of Microsoft COCO helper class for reading and visualizing annotations. + :param annotation_file (str): location of annotation file + :param image_folder (str): location to the folder that hosts images. + :return: + """ + # load dataset + self.dataset = {} + self.anns = [] + self.imgToAnns = {} + self.catToImgs = {} + self.imgs = {} + self.cats = {} + if not annotation_file == None: + print 'loading annotations into memory...' + tic = time.time() + dataset = json.load(open(annotation_file, 'r')) + print 'Done (t=%0.2fs)'%(time.time()- tic) + self.dataset = dataset + self.createIndex() + + def createIndex(self): + # create index + print 'creating index...' 
+ anns = {} + imgToAnns = {} + catToImgs = {} + cats = {} + imgs = {} + if 'annotations' in self.dataset: + imgToAnns = {ann['image_id']: [] for ann in self.dataset['annotations']} + anns = {ann['id']: [] for ann in self.dataset['annotations']} + for ann in self.dataset['annotations']: + imgToAnns[ann['image_id']] += [ann] + anns[ann['id']] = ann + + if 'images' in self.dataset: + imgs = {im['id']: {} for im in self.dataset['images']} + for img in self.dataset['images']: + imgs[img['id']] = img + + if 'categories' in self.dataset: + cats = {cat['id']: [] for cat in self.dataset['categories']} + for cat in self.dataset['categories']: + cats[cat['id']] = cat + catToImgs = {cat['id']: [] for cat in self.dataset['categories']} + if 'annotations' in self.dataset: + for ann in self.dataset['annotations']: + catToImgs[ann['category_id']] += [ann['image_id']] + + print 'index created!' + + # create class members + self.anns = anns + self.imgToAnns = imgToAnns + self.catToImgs = catToImgs + self.imgs = imgs + self.cats = cats + + def info(self): + """ + Print information about the annotation file. + :return: + """ + for key, value in self.dataset['info'].items(): + print '%s: %s'%(key, value) + + def getAnnIds(self, imgIds=[], catIds=[], areaRng=[], iscrowd=None): + """ + Get ann ids that satisfy given filter conditions. default skips that filter + :param imgIds (int array) : get anns for given imgs + catIds (int array) : get anns for given cats + areaRng (float array) : get anns for given area range (e.g. [0 inf]) + iscrowd (boolean) : get anns for given crowd label (False or True) + :return: ids (int array) : integer array of ann ids + """ + imgIds = imgIds if type(imgIds) == list else [imgIds] + catIds = catIds if type(catIds) == list else [catIds] + + if len(imgIds) == len(catIds) == len(areaRng) == 0: + anns = self.dataset['annotations'] + else: + if not len(imgIds) == 0: + # this can be changed by defaultdict + lists = [self.imgToAnns[imgId] for imgId in imgIds if imgId in self.imgToAnns] + anns = list(itertools.chain.from_iterable(lists)) + else: + anns = self.dataset['annotations'] + anns = anns if len(catIds) == 0 else [ann for ann in anns if ann['category_id'] in catIds] + anns = anns if len(areaRng) == 0 else [ann for ann in anns if ann['area'] > areaRng[0] and ann['area'] < areaRng[1]] + if not iscrowd == None: + ids = [ann['id'] for ann in anns if ann['iscrowd'] == iscrowd] + else: + ids = [ann['id'] for ann in anns] + return ids + + def getCatIds(self, catNms=[], supNms=[], catIds=[]): + """ + filtering parameters. default skips that filter. 
+ :param catNms (str array) : get cats for given cat names + :param supNms (str array) : get cats for given supercategory names + :param catIds (int array) : get cats for given cat ids + :return: ids (int array) : integer array of cat ids + """ + catNms = catNms if type(catNms) == list else [catNms] + supNms = supNms if type(supNms) == list else [supNms] + catIds = catIds if type(catIds) == list else [catIds] + + if len(catNms) == len(supNms) == len(catIds) == 0: + cats = self.dataset['categories'] + else: + cats = self.dataset['categories'] + cats = cats if len(catNms) == 0 else [cat for cat in cats if cat['name'] in catNms] + cats = cats if len(supNms) == 0 else [cat for cat in cats if cat['supercategory'] in supNms] + cats = cats if len(catIds) == 0 else [cat for cat in cats if cat['id'] in catIds] + ids = [cat['id'] for cat in cats] + return ids + + def getImgIds(self, imgIds=[], catIds=[]): + ''' + Get img ids that satisfy given filter conditions. + :param imgIds (int array) : get imgs for given ids + :param catIds (int array) : get imgs with all given cats + :return: ids (int array) : integer array of img ids + ''' + imgIds = imgIds if type(imgIds) == list else [imgIds] + catIds = catIds if type(catIds) == list else [catIds] + + if len(imgIds) == len(catIds) == 0: + ids = self.imgs.keys() + else: + ids = set(imgIds) + for i, catId in enumerate(catIds): + if i == 0 and len(ids) == 0: + ids = set(self.catToImgs[catId]) + else: + ids &= set(self.catToImgs[catId]) + return list(ids) + + def loadAnns(self, ids=[]): + """ + Load anns with the specified ids. + :param ids (int array) : integer ids specifying anns + :return: anns (object array) : loaded ann objects + """ + if type(ids) == list: + return [self.anns[id] for id in ids] + elif type(ids) == int: + return [self.anns[ids]] + + def loadCats(self, ids=[]): + """ + Load cats with the specified ids. + :param ids (int array) : integer ids specifying cats + :return: cats (object array) : loaded cat objects + """ + if type(ids) == list: + return [self.cats[id] for id in ids] + elif type(ids) == int: + return [self.cats[ids]] + + def loadImgs(self, ids=[]): + """ + Load anns with the specified ids. + :param ids (int array) : integer ids specifying img + :return: imgs (object array) : loaded img objects + """ + if type(ids) == list: + return [self.imgs[id] for id in ids] + elif type(ids) == int: + return [self.imgs[ids]] + + def showAnns(self, anns): + """ + Display the specified annotations. 
+ :param anns (array of object): annotations to display + :return: None + """ + if len(anns) == 0: + return 0 + if 'segmentation' in anns[0]: + datasetType = 'instances' + elif 'caption' in anns[0]: + datasetType = 'captions' + if datasetType == 'instances': + ax = plt.gca() + polygons = [] + color = [] + for ann in anns: + c = np.random.random((1, 3)).tolist()[0] + if type(ann['segmentation']) == list: + # polygon + for seg in ann['segmentation']: + poly = np.array(seg).reshape((len(seg)/2, 2)) + polygons.append(Polygon(poly, True,alpha=0.4)) + color.append(c) + else: + # mask + t = self.imgs[ann['image_id']] + if type(ann['segmentation']['counts']) == list: + rle = mask.frPyObjects([ann['segmentation']], t['height'], t['width']) + else: + rle = [ann['segmentation']] + m = mask.decode(rle) + img = np.ones( (m.shape[0], m.shape[1], 3) ) + if ann['iscrowd'] == 1: + color_mask = np.array([2.0,166.0,101.0])/255 + if ann['iscrowd'] == 0: + color_mask = np.random.random((1, 3)).tolist()[0] + for i in range(3): + img[:,:,i] = color_mask[i] + ax.imshow(np.dstack( (img, m*0.5) )) + p = PatchCollection(polygons, facecolors=color, edgecolors=(0,0,0,1), linewidths=3, alpha=0.4) + ax.add_collection(p) + elif datasetType == 'captions': + for ann in anns: + print ann['caption'] + + def loadRes(self, resFile): + """ + Load result file and return a result api object. + :param resFile (str) : file name of result file + :return: res (obj) : result api object + """ + res = COCO() + res.dataset['images'] = [img for img in self.dataset['images']] + # res.dataset['info'] = copy.deepcopy(self.dataset['info']) + # res.dataset['licenses'] = copy.deepcopy(self.dataset['licenses']) + + print 'Loading and preparing results... ' + tic = time.time() + anns = json.load(open(resFile)) + assert type(anns) == list, 'results in not an array of objects' + annsImgIds = [ann['image_id'] for ann in anns] + assert set(annsImgIds) == (set(annsImgIds) & set(self.getImgIds())), \ + 'Results do not correspond to current coco set' + if 'caption' in anns[0]: + imgIds = set([img['id'] for img in res.dataset['images']]) & set([ann['image_id'] for ann in anns]) + res.dataset['images'] = [img for img in res.dataset['images'] if img['id'] in imgIds] + for id, ann in enumerate(anns): + ann['id'] = id+1 + elif 'bbox' in anns[0] and not anns[0]['bbox'] == []: + res.dataset['categories'] = copy.deepcopy(self.dataset['categories']) + for id, ann in enumerate(anns): + bb = ann['bbox'] + x1, x2, y1, y2 = [bb[0], bb[0]+bb[2], bb[1], bb[1]+bb[3]] + if not 'segmentation' in ann: + ann['segmentation'] = [[x1, y1, x1, y2, x2, y2, x2, y1]] + ann['area'] = bb[2]*bb[3] + ann['id'] = id+1 + ann['iscrowd'] = 0 + elif 'segmentation' in anns[0]: + res.dataset['categories'] = copy.deepcopy(self.dataset['categories']) + for id, ann in enumerate(anns): + # now only support compressed RLE format as segmentation results + ann['area'] = mask.area([ann['segmentation']])[0] + if not 'bbox' in ann: + ann['bbox'] = mask.toBbox([ann['segmentation']])[0] + ann['id'] = id+1 + ann['iscrowd'] = 0 + print 'DONE (t=%0.2fs)'%(time.time()- tic) + + res.dataset['annotations'] = anns + res.createIndex() + return res + + def download( self, tarDir = None, imgIds = [] ): + ''' + Download COCO images from mscoco.org server. 
+ :param tarDir (str): COCO results directory name + imgIds (list): images to be downloaded + :return: + ''' + if tarDir is None: + print 'Please specify target directory' + return -1 + if len(imgIds) == 0: + imgs = self.imgs.values() + else: + imgs = self.loadImgs(imgIds) + N = len(imgs) + if not os.path.exists(tarDir): + os.makedirs(tarDir) + for i, img in enumerate(imgs): + tic = time.time() + fname = os.path.join(tarDir, img['file_name']) + if not os.path.exists(fname): + urllib.urlretrieve(img['coco_url'], fname) + print 'downloaded %d/%d images (t=%.1fs)'%(i, N, time.time()- tic) diff --git a/lib/pycocotools/cocoeval.py b/lib/pycocotools/cocoeval.py new file mode 100644 index 0000000..f389eb0 --- /dev/null +++ b/lib/pycocotools/cocoeval.py @@ -0,0 +1,444 @@ +__author__ = 'tsungyi' + +import numpy as np +import datetime +import time +from collections import defaultdict +import mask +import copy + +class COCOeval: + # Interface for evaluating detection on the Microsoft COCO dataset. + # + # The usage for CocoEval is as follows: + # cocoGt=..., cocoDt=... # load dataset and results + # E = CocoEval(cocoGt,cocoDt); # initialize CocoEval object + # E.params.recThrs = ...; # set parameters as desired + # E.evaluate(); # run per image evaluation + # E.accumulate(); # accumulate per image results + # E.summarize(); # display summary metrics of results + # For example usage see evalDemo.m and http://mscoco.org/. + # + # The evaluation parameters are as follows (defaults in brackets): + # imgIds - [all] N img ids to use for evaluation + # catIds - [all] K cat ids to use for evaluation + # iouThrs - [.5:.05:.95] T=10 IoU thresholds for evaluation + # recThrs - [0:.01:1] R=101 recall thresholds for evaluation + # areaRng - [...] A=4 object area ranges for evaluation + # maxDets - [1 10 100] M=3 thresholds on max detections per image + # useSegm - [1] if true evaluate against ground-truth segments + # useCats - [1] if true use category labels for evaluation # Note: if useSegm=0 the evaluation is run on bounding boxes. + # Note: if useCats=0 category labels are ignored as in proposal scoring. + # Note: multiple areaRngs [Ax2] and maxDets [Mx1] can be specified. + # + # evaluate(): evaluates detections on every image and every category and + # concats the results into the "evalImgs" with fields: + # dtIds - [1xD] id for each of the D detections (dt) + # gtIds - [1xG] id for each of the G ground truths (gt) + # dtMatches - [TxD] matching gt id at each IoU or 0 + # gtMatches - [TxG] matching dt id at each IoU or 0 + # dtScores - [1xD] confidence of each dt + # gtIgnore - [1xG] ignore flag for each gt + # dtIgnore - [TxD] ignore flag for each dt at each IoU + # + # accumulate(): accumulates the per-image, per-category evaluation + # results in "evalImgs" into the dictionary "eval" with fields: + # params - parameters used for evaluation + # date - date evaluation was performed + # counts - [T,R,K,A,M] parameter dimensions (see above) + # precision - [TxRxKxAxM] precision for every evaluation setting + # recall - [TxKxAxM] max recall for every evaluation setting + # Note: precision and recall==-1 for settings with no gt objects. + # + # See also coco, mask, pycocoDemo, pycocoEvalDemo + # + # Microsoft COCO Toolbox. version 2.0 + # Data, paper, and tutorials available at: http://mscoco.org/ + # Code written by Piotr Dollar and Tsung-Yi Lin, 2015. 
+ # Licensed under the Simplified BSD License [see coco/license.txt] + def __init__(self, cocoGt=None, cocoDt=None): + ''' + Initialize CocoEval using coco APIs for gt and dt + :param cocoGt: coco object with ground truth annotations + :param cocoDt: coco object with detection results + :return: None + ''' + self.cocoGt = cocoGt # ground truth COCO API + self.cocoDt = cocoDt # detections COCO API + self.params = {} # evaluation parameters + self.evalImgs = defaultdict(list) # per-image per-category evaluation results [KxAxI] elements + self.eval = {} # accumulated evaluation results + self._gts = defaultdict(list) # gt for evaluation + self._dts = defaultdict(list) # dt for evaluation + self.params = Params() # parameters + self._paramsEval = {} # parameters for evaluation + self.stats = [] # result summarization + self.ious = {} # ious between all gts and dts + if not cocoGt is None: + self.params.imgIds = sorted(cocoGt.getImgIds()) + self.params.catIds = sorted(cocoGt.getCatIds()) + + + def _prepare(self): + ''' + Prepare ._gts and ._dts for evaluation based on params + :return: None + ''' + # + def _toMask(objs, coco): + # modify segmentation by reference + for obj in objs: + t = coco.imgs[obj['image_id']] + if type(obj['segmentation']) == list: + if type(obj['segmentation'][0]) == dict: + print 'debug' + obj['segmentation'] = mask.frPyObjects(obj['segmentation'],t['height'],t['width']) + if len(obj['segmentation']) == 1: + obj['segmentation'] = obj['segmentation'][0] + else: + # an object can have multiple polygon regions + # merge them into one RLE mask + obj['segmentation'] = mask.merge(obj['segmentation']) + elif type(obj['segmentation']) == dict and type(obj['segmentation']['counts']) == list: + obj['segmentation'] = mask.frPyObjects([obj['segmentation']],t['height'],t['width'])[0] + elif type(obj['segmentation']) == dict and \ + type(obj['segmentation']['counts'] == unicode or type(obj['segmentation']['counts']) == str): + pass + else: + raise Exception('segmentation format not supported.') + p = self.params + if p.useCats: + gts=self.cocoGt.loadAnns(self.cocoGt.getAnnIds(imgIds=p.imgIds, catIds=p.catIds)) + dts=self.cocoDt.loadAnns(self.cocoDt.getAnnIds(imgIds=p.imgIds, catIds=p.catIds)) + else: + gts=self.cocoGt.loadAnns(self.cocoGt.getAnnIds(imgIds=p.imgIds)) + dts=self.cocoDt.loadAnns(self.cocoDt.getAnnIds(imgIds=p.imgIds)) + + if p.useSegm: + _toMask(gts, self.cocoGt) + _toMask(dts, self.cocoDt) + self._gts = defaultdict(list) # gt for evaluation + self._dts = defaultdict(list) # dt for evaluation + for gt in gts: + self._gts[gt['image_id'], gt['category_id']].append(gt) + for dt in dts: + self._dts[dt['image_id'], dt['category_id']].append(dt) + self.evalImgs = defaultdict(list) # per-image per-category evaluation results + self.eval = {} # accumulated evaluation results + + def evaluate(self): + ''' + Run per image evaluation on given images and store results (a list of dict) in self.evalImgs + :return: None + ''' + tic = time.time() + print 'Running per image evaluation... 
'
+        p = self.params
+        p.imgIds = list(np.unique(p.imgIds))
+        if p.useCats:
+            p.catIds = list(np.unique(p.catIds))
+        p.maxDets = sorted(p.maxDets)
+        self.params=p
+
+        self._prepare()
+        # loop through images, area range, max detection number
+        catIds = p.catIds if p.useCats else [-1]
+
+        computeIoU = self.computeIoU
+        self.ious = {(imgId, catId): computeIoU(imgId, catId) \
+                        for imgId in p.imgIds
+                        for catId in catIds}
+
+        evaluateImg = self.evaluateImg
+        maxDet = p.maxDets[-1]
+        self.evalImgs = [evaluateImg(imgId, catId, areaRng, maxDet)
+                 for catId in catIds
+                 for areaRng in p.areaRng
+                 for imgId in p.imgIds
+             ]
+        self._paramsEval = copy.deepcopy(self.params)
+        toc = time.time()
+        print 'DONE (t=%0.2fs).'%(toc-tic)
+
+    def computeIoU(self, imgId, catId):
+        p = self.params
+        if p.useCats:
+            gt = self._gts[imgId,catId]
+            dt = self._dts[imgId,catId]
+        else:
+            gt = [_ for cId in p.catIds for _ in self._gts[imgId,cId]]
+            dt = [_ for cId in p.catIds for _ in self._dts[imgId,cId]]
+        if len(gt) == 0 and len(dt) ==0:
+            return []
+        dt = sorted(dt, key=lambda x: -x['score'])
+        if len(dt) > p.maxDets[-1]:
+            dt=dt[0:p.maxDets[-1]]
+
+        if p.useSegm:
+            g = [g['segmentation'] for g in gt]
+            d = [d['segmentation'] for d in dt]
+        else:
+            g = [g['bbox'] for g in gt]
+            d = [d['bbox'] for d in dt]
+
+        # compute iou between each dt and gt region
+        iscrowd = [int(o['iscrowd']) for o in gt]
+        ious = mask.iou(d,g,iscrowd)
+        return ious
+
+    def evaluateImg(self, imgId, catId, aRng, maxDet):
+        '''
+        perform evaluation for single category and image
+        :return: dict (single image results)
+        '''
+        #
+        p = self.params
+        if p.useCats:
+            gt = self._gts[imgId,catId]
+            dt = self._dts[imgId,catId]
+        else:
+            gt = [_ for cId in p.catIds for _ in self._gts[imgId,cId]]
+            dt = [_ for cId in p.catIds for _ in self._dts[imgId,cId]]
+        if len(gt) == 0 and len(dt) ==0:
+            return None
+
+        for g in gt:
+            if 'ignore' not in g:
+                g['ignore'] = 0
+            if g['iscrowd'] == 1 or g['ignore'] or (g['area']<aRng[0] or g['area']>aRng[1]):
+                g['_ignore'] = 1
+            else:
+                g['_ignore'] = 0
+
+        # sort dt highest score first, sort gt ignore last
+        # gt = sorted(gt, key=lambda x: x['_ignore'])
+        gtind = [ind for (ind, g) in sorted(enumerate(gt), key=lambda (ind, g): g['_ignore']) ]
+
+        gt = [gt[ind] for ind in gtind]
+        dt = sorted(dt, key=lambda x: -x['score'])[0:maxDet]
+        iscrowd = [int(o['iscrowd']) for o in gt]
+        # load computed ious
+        N_iou = len(self.ious[imgId, catId])
+        ious = self.ious[imgId, catId][0:maxDet, np.array(gtind)] if N_iou >0 else self.ious[imgId, catId]
+
+        T = len(p.iouThrs)
+        G = len(gt)
+        D = len(dt)
+        gtm = np.zeros((T,G))
+        dtm = np.zeros((T,D))
+        gtIg = np.array([g['_ignore'] for g in gt])
+        dtIg = np.zeros((T,D))
+        if not len(ious)==0:
+            for tind, t in enumerate(p.iouThrs):
+                for dind, d in enumerate(dt):
+                    # information about best match so far (m=-1 -> unmatched)
+                    iou = min([t,1-1e-10])
+                    m = -1
+                    for gind, g in enumerate(gt):
+                        # if this gt already matched, and not a crowd, continue
+                        if gtm[tind,gind]>0 and not iscrowd[gind]:
+                            continue
+                        # if dt matched to reg gt, and on ignore gt, stop
+                        if m>-1 and gtIg[m]==0 and gtIg[gind]==1:
+                            break
+                        # continue to next gt unless better match made
+                        if ious[dind,gind] < iou:
+                            continue
+                        # match successful and best so far, store appropriately
+                        iou=ious[dind,gind]
+                        m=gind
+                    # if match made store id of match for both dt and gt
+                    if m ==-1:
+                        continue
+                    dtIg[tind,dind] = gtIg[m]
+                    dtm[tind,dind] = gt[m]['id']
+                    gtm[tind,m] = d['id']
+        # set unmatched detections outside of area range to ignore
+        a = np.array([d['area']<aRng[0] or d['area']>aRng[1] for d in dt]).reshape((1, len(dt)))
+        dtIg = np.logical_or(dtIg, np.logical_and(dtm==0, np.repeat(a,T,0)))
+        # store results for given image and category
+        return {
+                'image_id': imgId,
+                'category_id': catId,
+                'aRng': aRng,
+                'maxDet': maxDet,
+                'dtIds': [d['id'] for d in dt],
+                'gtIds': [g['id'] for g in gt],
+                'dtMatches': dtm,
+                'gtMatches': gtm,
+                'dtScores': [d['score'] for d in dt],
+                'gtIgnore': gtIg,
+                'dtIgnore': dtIg,
+            }
+
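+    # The per-image dicts produced by evaluateImg() above are combined by
+    # accumulate() below: detection scores are re-sorted across images, the
+    # match/ignore flags are concatenated in that order, and cumulative TP/FP
+    # counts are turned into a monotone precision curve sampled at p.recThrs
+    # for every (IoU threshold, category, area range, maxDets) combination.
+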
+    def accumulate(self, p = None):
+        '''
+        Accumulate per image evaluation results and store the result in self.eval
+        :param p: input params for evaluation
+        :return: None
+        '''
+        print 'Accumulating evaluation results... '
+        tic = time.time()
+        if not self.evalImgs:
+            print 'Please run evaluate() first'
+        # allows input customized parameters
+        if p is None:
+            p = self.params
+        p.catIds = p.catIds if p.useCats == 1 else [-1]
+        T = len(p.iouThrs)
+        R = len(p.recThrs)
+        K = len(p.catIds) if p.useCats else 1
+        A = len(p.areaRng)
+        M = len(p.maxDets)
+        precision = -np.ones((T,R,K,A,M)) # -1 for the precision of absent categories
+        recall = -np.ones((T,K,A,M))
+
+        # create dictionary for future indexing
+        _pe = self._paramsEval
+        catIds = _pe.catIds if _pe.useCats else [-1]
+        setK = set(catIds)
+        setA = set(map(tuple, _pe.areaRng))
+        setM = set(_pe.maxDets)
+        setI = set(_pe.imgIds)
+        # get inds to evaluate
+        k_list = [n for n, k in enumerate(p.catIds) if k in setK]
+        m_list = [m for n, m in enumerate(p.maxDets) if m in setM]
+        a_list = [n for n, a in enumerate(map(lambda x: tuple(x), p.areaRng)) if a in setA]
+        i_list = [n for n, i in enumerate(p.imgIds) if i in setI]
+        # K0 = len(_pe.catIds)
+        I0 = len(_pe.imgIds)
+        A0 = len(_pe.areaRng)
+        # retrieve E at each category, area range, and max number of detections
+        for k, k0 in enumerate(k_list):
+            Nk = k0*A0*I0
+            for a, a0 in enumerate(a_list):
+                Na = a0*I0
+                for m, maxDet in enumerate(m_list):
+                    E = [self.evalImgs[Nk+Na+i] for i in i_list]
+                    E = filter(None, E)
+                    if len(E) == 0:
+                        continue
+                    dtScores = np.concatenate([e['dtScores'][0:maxDet] for e in E])
+
+                    # different sorting method generates slightly different results.
+                    # mergesort is used to be consistent as Matlab implementation.
+ inds = np.argsort(-dtScores, kind='mergesort') + + dtm = np.concatenate([e['dtMatches'][:,0:maxDet] for e in E], axis=1)[:,inds] + dtIg = np.concatenate([e['dtIgnore'][:,0:maxDet] for e in E], axis=1)[:,inds] + gtIg = np.concatenate([e['gtIgnore'] for e in E]) + npig = len([ig for ig in gtIg if ig == 0]) + if npig == 0: + continue + tps = np.logical_and( dtm, np.logical_not(dtIg) ) + fps = np.logical_and(np.logical_not(dtm), np.logical_not(dtIg) ) + + tp_sum = np.cumsum(tps, axis=1).astype(dtype=np.float) + fp_sum = np.cumsum(fps, axis=1).astype(dtype=np.float) + for t, (tp, fp) in enumerate(zip(tp_sum, fp_sum)): + tp = np.array(tp) + fp = np.array(fp) + nd = len(tp) + rc = tp / npig + pr = tp / (fp+tp+np.spacing(1)) + q = np.zeros((R,)) + + if nd: + recall[t,k,a,m] = rc[-1] + else: + recall[t,k,a,m] = 0 + + # numpy is slow without cython optimization for accessing elements + # use python array gets significant speed improvement + pr = pr.tolist(); q = q.tolist() + + for i in range(nd-1, 0, -1): + if pr[i] > pr[i-1]: + pr[i-1] = pr[i] + + inds = np.searchsorted(rc, p.recThrs) + try: + for ri, pi in enumerate(inds): + q[ri] = pr[pi] + except: + pass + precision[t,:,k,a,m] = np.array(q) + self.eval = { + 'params': p, + 'counts': [T, R, K, A, M], + 'date': datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + 'precision': precision, + 'recall': recall, + } + toc = time.time() + print 'DONE (t=%0.2fs).'%( toc-tic ) + + def summarize(self): + ''' + Compute and display summary metrics for evaluation results. + Note this functin can *only* be applied on the default parameter setting + ''' + def _summarize( ap=1, iouThr=None, areaRng='all', maxDets=100 ): + p = self.params + iStr = ' {:<18} {} @[ IoU={:<9} | area={:>6} | maxDets={:>3} ] = {}' + titleStr = 'Average Precision' if ap == 1 else 'Average Recall' + typeStr = '(AP)' if ap==1 else '(AR)' + iouStr = '%0.2f:%0.2f'%(p.iouThrs[0], p.iouThrs[-1]) if iouThr is None else '%0.2f'%(iouThr) + areaStr = areaRng + maxDetsStr = '%d'%(maxDets) + + aind = [i for i, aRng in enumerate(['all', 'small', 'medium', 'large']) if aRng == areaRng] + mind = [i for i, mDet in enumerate([1, 10, 100]) if mDet == maxDets] + if ap == 1: + # dimension of precision: [TxRxKxAxM] + s = self.eval['precision'] + # IoU + if iouThr is not None: + t = np.where(iouThr == p.iouThrs)[0] + s = s[t] + # areaRng + s = s[:,:,:,aind,mind] + else: + # dimension of recall: [TxKxAxM] + s = self.eval['recall'] + s = s[:,:,aind,mind] + if len(s[s>-1])==0: + mean_s = -1 + else: + mean_s = np.mean(s[s>-1]) + print iStr.format(titleStr, typeStr, iouStr, areaStr, maxDetsStr, '%.3f'%(float(mean_s))) + return mean_s + + if not self.eval: + raise Exception('Please run accumulate() first') + self.stats = np.zeros((12,)) + self.stats[0] = _summarize(1) + self.stats[1] = _summarize(1,iouThr=.5) + self.stats[2] = _summarize(1,iouThr=.75) + self.stats[3] = _summarize(1,areaRng='small') + self.stats[4] = _summarize(1,areaRng='medium') + self.stats[5] = _summarize(1,areaRng='large') + self.stats[6] = _summarize(0,maxDets=1) + self.stats[7] = _summarize(0,maxDets=10) + self.stats[8] = _summarize(0,maxDets=100) + self.stats[9] = _summarize(0,areaRng='small') + self.stats[10] = _summarize(0,areaRng='medium') + self.stats[11] = _summarize(0,areaRng='large') + + def __str__(self): + self.summarize() + +class Params: + ''' + Params for coco evaluation api + ''' + def __init__(self): + self.imgIds = [] + self.catIds = [] + # np.arange causes trouble. 
the data point on arange is slightly larger than the true value + self.iouThrs = np.linspace(.5, 0.95, np.round((0.95-.5)/.05)+1, endpoint=True) + self.recThrs = np.linspace(.0, 1.00, np.round((1.00-.0)/.01)+1, endpoint=True) + self.maxDets = [1,10,100] + self.areaRng = [ [0**2,1e5**2], [0**2, 32**2], [32**2, 96**2], [96**2, 1e5**2] ] + self.useSegm = 0 + self.useCats = 1 \ No newline at end of file diff --git a/lib/pycocotools/license.txt b/lib/pycocotools/license.txt new file mode 100644 index 0000000..495c163 --- /dev/null +++ b/lib/pycocotools/license.txt @@ -0,0 +1,26 @@ +Copyright (c) 2014, Piotr Dollar and Tsung-Yi Lin +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The views and conclusions contained in the software and documentation are those +of the authors and should not be interpreted as representing official policies, +either expressed or implied, of the FreeBSD Project. diff --git a/lib/pycocotools/mask.py b/lib/pycocotools/mask.py new file mode 100644 index 0000000..6732546 --- /dev/null +++ b/lib/pycocotools/mask.py @@ -0,0 +1,82 @@ +__author__ = 'tsungyi' + +import pycocotools._mask as _mask + +# Interface for manipulating masks stored in RLE format. +# +# RLE is a simple yet efficient format for storing binary masks. RLE +# first divides a vector (or vectorized image) into a series of piecewise +# constant regions and then for each piece simply stores the length of +# that piece. For example, given M=[0 0 1 1 1 0 1] the RLE counts would +# be [2 3 1 1], or for M=[1 1 1 1 1 1 0] the counts would be [0 6 1] +# (note that the odd counts are always the numbers of zeros). Instead of +# storing the counts directly, additional compression is achieved with a +# variable bitrate representation based on a common scheme called LEB128. +# +# Compression is greatest given large piecewise constant regions. +# Specifically, the size of the RLE is proportional to the number of +# *boundaries* in M (or for an image the number of boundaries in the y +# direction). Assuming fairly simple shapes, the RLE representation is +# O(sqrt(n)) where n is number of pixels in the object. Hence space usage +# is substantially lower, especially for large simple objects (large n). +# +# Many common operations on masks can be computed directly using the RLE +# (without need for decoding). 
This includes computations such as area, +# union, intersection, etc. All of these operations are linear in the +# size of the RLE, in other words they are O(sqrt(n)) where n is the area +# of the object. Computing these operations on the original mask is O(n). +# Thus, using the RLE can result in substantial computational savings. +# +# The following API functions are defined: +# encode - Encode binary masks using RLE. +# decode - Decode binary masks encoded via RLE. +# merge - Compute union or intersection of encoded masks. +# iou - Compute intersection over union between masks. +# area - Compute area of encoded masks. +# toBbox - Get bounding boxes surrounding encoded masks. +# frPyObjects - Convert polygon, bbox, and uncompressed RLE to encoded RLE mask. +# +# Usage: +# Rs = encode( masks ) +# masks = decode( Rs ) +# R = merge( Rs, intersect=false ) +# o = iou( dt, gt, iscrowd ) +# a = area( Rs ) +# bbs = toBbox( Rs ) +# Rs = frPyObjects( [pyObjects], h, w ) +# +# In the API the following formats are used: +# Rs - [dict] Run-length encoding of binary masks +# R - dict Run-length encoding of binary mask +# masks - [hxwxn] Binary mask(s) (must have type np.ndarray(dtype=uint8) in column-major order) +# iscrowd - [nx1] list of np.ndarray. 1 indicates corresponding gt image has crowd region to ignore +# bbs - [nx4] Bounding box(es) stored as [x y w h] +# poly - Polygon stored as [[x1 y1 x2 y2...],[x1 y1 ...],...] (2D list) +# dt,gt - May be either bounding boxes or encoded masks +# Both poly and bbs are 0-indexed (bbox=[0 0 1 1] encloses first pixel). +# +# Finally, a note about the intersection over union (iou) computation. +# The standard iou of a ground truth (gt) and detected (dt) object is +# iou(gt,dt) = area(intersect(gt,dt)) / area(union(gt,dt)) +# For "crowd" regions, we use a modified criteria. If a gt object is +# marked as "iscrowd", we allow a dt to match any subregion of the gt. +# Choosing gt' in the crowd gt that best matches the dt can be done using +# gt'=intersect(dt,gt). Since by definition union(gt',dt)=dt, computing +# iou(gt,dt,iscrowd) = iou(gt',dt) = area(intersect(gt,dt)) / area(dt) +# For crowd gt regions we use this modified criteria above for the iou. +# +# To compile run "python setup.py build_ext --inplace" +# Please do not contact us for help with compiling. +# +# Microsoft COCO Toolbox. version 2.0 +# Data, paper, and tutorials available at: http://mscoco.org/ +# Code written by Piotr Dollar and Tsung-Yi Lin, 2015. +# Licensed under the Simplified BSD License [see coco/license.txt] + +encode = _mask.encode +decode = _mask.decode +iou = _mask.iou +merge = _mask.merge +area = _mask.area +toBbox = _mask.toBbox +frPyObjects = _mask.frPyObjects \ No newline at end of file diff --git a/lib/pycocotools/maskApi.c b/lib/pycocotools/maskApi.c new file mode 100644 index 0000000..2b2d891 --- /dev/null +++ b/lib/pycocotools/maskApi.c @@ -0,0 +1,208 @@ +/************************************************************************** +* Microsoft COCO Toolbox. version 2.0 +* Data, paper, and tutorials available at: http://mscoco.org/ +* Code written by Piotr Dollar and Tsung-Yi Lin, 2015. +* Licensed under the Simplified BSD License [see coco/license.txt] +**************************************************************************/ +#include "maskApi.h" +#include +#include + +uint umin( uint a, uint b ) { return (ab) ? 
a : b; } + +void rleInit( RLE *R, siz h, siz w, siz m, uint *cnts ) { + R->h=h; R->w=w; R->m=m; R->cnts=(m==0)?0:malloc(sizeof(uint)*m); + if(cnts) for(siz j=0; jcnts[j]=cnts[j]; +} + +void rleFree( RLE *R ) { + free(R->cnts); R->cnts=0; +} + +void rlesInit( RLE **R, siz n ) { + *R = (RLE*) malloc(sizeof(RLE)*n); + for(siz i=0; i0 ) { + c=umin(ca,cb); cc+=c; ct=0; + ca-=c; if(!ca && a0) { + crowd=iscrowd!=NULL && iscrowd[g]; + if(dt[d].h!=gt[g].h || dt[d].w!=gt[g].w) { o[g*m+d]=-1; continue; } + siz ka, kb, a, b; uint c, ca, cb, ct, i, u; bool va, vb; + ca=dt[d].cnts[0]; ka=dt[d].m; va=vb=0; + cb=gt[g].cnts[0]; kb=gt[g].m; a=b=1; i=u=0; ct=1; + while( ct>0 ) { + c=umin(ca,cb); if(va||vb) { u+=c; if(va&&vb) i+=c; } ct=0; + ca-=c; if(!ca && ad?1:c=dy && xs>xe) || (dxye); + if(flip) { t=xs; xs=xe; xe=t; t=ys; ys=ye; ye=t; } + s = dx>=dy ? (double)(ye-ys)/dx : (double)(xe-xs)/dy; + if(dx>=dy) for( int d=0; d<=dx; d++ ) { + t=flip?dx-d:d; u[m]=t+xs; v[m]=(int)(ys+s*t+.5); m++; + } else for( int d=0; d<=dy; d++ ) { + t=flip?dy-d:d; v[m]=t+ys; u[m]=(int)(xs+s*t+.5); m++; + } + } + // get points along y-boundary and downsample + free(x); free(y); k=m; m=0; double xd, yd; + x=malloc(sizeof(int)*k); y=malloc(sizeof(int)*k); + for( j=1; jw-1 ) continue; + yd=(double)(v[j]h) yd=h; yd=ceil(yd); + x[m]=(int) xd; y[m]=(int) yd; m++; + } + // compute rle encoding given y-boundary points + k=m; a=malloc(sizeof(uint)*(k+1)); + for( j=0; j0) b[m++]=a[j++]; else { + j++; if(jm, p=0; long x; bool more; + char *s=malloc(sizeof(char)*m*6); + for( i=0; icnts[i]; if(i>2) x-=(long) R->cnts[i-2]; more=1; + while( more ) { + char c=x & 0x1f; x >>= 5; more=(c & 0x10) ? x!=-1 : x!=0; + if(more) c |= 0x20; c+=48; s[p++]=c; + } + } + s[p]=0; return s; +} + +void rleFrString( RLE *R, char *s, siz h, siz w ) { + siz m=0, p=0, k; long x; bool more; uint *cnts; + while( s[m] ) m++; cnts=malloc(sizeof(uint)*m); m=0; + while( s[p] ) { + x=0; k=0; more=1; + while( more ) { + char c=s[p]-48; x |= (c & 0x1f) << 5*k; + more = c & 0x20; p++; k++; + if(!more && (c & 0x10)) x |= -1 << 5*k; + } + if(m>2) x+=(long) cnts[m-2]; cnts[m++]=(uint) x; + } + rleInit(R,h,w,m,cnts); free(cnts); +} diff --git a/lib/pycocotools/maskApi.h b/lib/pycocotools/maskApi.h new file mode 100644 index 0000000..ff16116 --- /dev/null +++ b/lib/pycocotools/maskApi.h @@ -0,0 +1,55 @@ +/************************************************************************** +* Microsoft COCO Toolbox. version 2.0 +* Data, paper, and tutorials available at: http://mscoco.org/ +* Code written by Piotr Dollar and Tsung-Yi Lin, 2015. +* Licensed under the Simplified BSD License [see coco/license.txt] +**************************************************************************/ +#pragma once +#include + +typedef unsigned int uint; +typedef unsigned long siz; +typedef unsigned char byte; +typedef double* BB; +typedef struct { siz h, w, m; uint *cnts; } RLE; + +// Initialize/destroy RLE. +void rleInit( RLE *R, siz h, siz w, siz m, uint *cnts ); +void rleFree( RLE *R ); + +// Initialize/destroy RLE array. +void rlesInit( RLE **R, siz n ); +void rlesFree( RLE **R, siz n ); + +// Encode binary masks using RLE. +void rleEncode( RLE *R, const byte *mask, siz h, siz w, siz n ); + +// Decode binary masks encoded via RLE. +void rleDecode( const RLE *R, byte *mask, siz n ); + +// Compute union or intersection of encoded masks. +void rleMerge( const RLE *R, RLE *M, siz n, bool intersect ); + +// Compute area of encoded masks. 
+void rleArea( const RLE *R, siz n, uint *a ); + +// Compute intersection over union between masks. +void rleIou( RLE *dt, RLE *gt, siz m, siz n, byte *iscrowd, double *o ); + +// Compute intersection over union between bounding boxes. +void bbIou( BB dt, BB gt, siz m, siz n, byte *iscrowd, double *o ); + +// Get bounding boxes surrounding encoded masks. +void rleToBbox( const RLE *R, BB bb, siz n ); + +// Convert bounding boxes to encoded masks. +void rleFrBbox( RLE *R, const BB bb, siz h, siz w, siz n ); + +// Convert polygon to encoded mask. +void rleFrPoly( RLE *R, const double *xy, siz k, siz h, siz w ); + +// Get compressed string representation of encoded mask. +char* rleToString( const RLE *R ); + +// Convert from compressed string representation of encoded mask. +void rleFrString( RLE *R, char *s, siz h, siz w ); diff --git a/lib/roi_data_layer/__init__.py b/lib/roi_data_layer/__init__.py new file mode 100644 index 0000000..7ba6a65 --- /dev/null +++ b/lib/roi_data_layer/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- diff --git a/lib/roi_data_layer/layer.py b/lib/roi_data_layer/layer.py new file mode 100644 index 0000000..04f4172 --- /dev/null +++ b/lib/roi_data_layer/layer.py @@ -0,0 +1,196 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""The data layer used during training to train a Fast R-CNN network. + +RoIDataLayer implements a Caffe Python layer. +""" + +import caffe +from fast_rcnn.config import cfg +from roi_data_layer.minibatch import get_minibatch +import numpy as np +import yaml +from multiprocessing import Process, Queue + +class RoIDataLayer(caffe.Layer): + """Fast R-CNN data layer used for training.""" + + def _shuffle_roidb_inds(self): + """Randomly permute the training roidb.""" + if cfg.TRAIN.ASPECT_GROUPING: + widths = np.array([r['width'] for r in self._roidb]) + heights = np.array([r['height'] for r in self._roidb]) + horz = (widths >= heights) + vert = np.logical_not(horz) + horz_inds = np.where(horz)[0] + vert_inds = np.where(vert)[0] + inds = np.hstack(( + np.random.permutation(horz_inds), + np.random.permutation(vert_inds))) + inds = np.reshape(inds, (-1, 2)) + row_perm = np.random.permutation(np.arange(inds.shape[0])) + inds = np.reshape(inds[row_perm, :], (-1,)) + self._perm = inds + else: + self._perm = np.random.permutation(np.arange(len(self._roidb))) + self._cur = 0 + + def _get_next_minibatch_inds(self): + """Return the roidb indices for the next minibatch.""" + if self._cur + cfg.TRAIN.IMS_PER_BATCH >= len(self._roidb): + self._shuffle_roidb_inds() + + db_inds = self._perm[self._cur:self._cur + cfg.TRAIN.IMS_PER_BATCH] + self._cur += cfg.TRAIN.IMS_PER_BATCH + return db_inds + + def _get_next_minibatch(self): + """Return the blobs to be used for the next minibatch. + + If cfg.TRAIN.USE_PREFETCH is True, then blobs will be computed in a + separate process and made available through self._blob_queue. 
+ """ + if cfg.TRAIN.USE_PREFETCH: + return self._blob_queue.get() + else: + db_inds = self._get_next_minibatch_inds() + minibatch_db = [self._roidb[i] for i in db_inds] + return get_minibatch(minibatch_db, self._num_classes) + + def set_roidb(self, roidb): + """Set the roidb to be used by this layer during training.""" + self._roidb = roidb + self._shuffle_roidb_inds() + if cfg.TRAIN.USE_PREFETCH: + self._blob_queue = Queue(10) + self._prefetch_process = BlobFetcher(self._blob_queue, + self._roidb, + self._num_classes) + self._prefetch_process.start() + # Terminate the child process when the parent exists + def cleanup(): + print 'Terminating BlobFetcher' + self._prefetch_process.terminate() + self._prefetch_process.join() + import atexit + atexit.register(cleanup) + + def setup(self, bottom, top): + """Setup the RoIDataLayer.""" + + # parse the layer parameter string, which must be valid YAML + layer_params = yaml.load(self.param_str) + + self._num_classes = layer_params['num_classes'] + + self._name_to_top_map = {} + + # data blob: holds a batch of N images, each with 3 channels + idx = 0 + top[idx].reshape(cfg.TRAIN.IMS_PER_BATCH, 3, + max(cfg.TRAIN.SCALES), cfg.TRAIN.MAX_SIZE) + self._name_to_top_map['data'] = idx + idx += 1 + + if cfg.TRAIN.HAS_RPN: + top[idx].reshape(1, 3) + self._name_to_top_map['im_info'] = idx + idx += 1 + + top[idx].reshape(1, 4) + self._name_to_top_map['gt_boxes'] = idx + idx += 1 + else: # not using RPN + # rois blob: holds R regions of interest, each is a 5-tuple + # (n, x1, y1, x2, y2) specifying an image batch index n and a + # rectangle (x1, y1, x2, y2) + top[idx].reshape(1, 5) + self._name_to_top_map['rois'] = idx + idx += 1 + + # labels blob: R categorical labels in [0, ..., K] for K foreground + # classes plus background + top[idx].reshape(1) + self._name_to_top_map['labels'] = idx + idx += 1 + + if cfg.TRAIN.BBOX_REG: + # bbox_targets blob: R bounding-box regression targets with 4 + # targets per class + top[idx].reshape(1, self._num_classes * 4) + self._name_to_top_map['bbox_targets'] = idx + idx += 1 + + # bbox_inside_weights blob: At most 4 targets per roi are active; + # thisbinary vector sepcifies the subset of active targets + top[idx].reshape(1, self._num_classes * 4) + self._name_to_top_map['bbox_inside_weights'] = idx + idx += 1 + + top[idx].reshape(1, self._num_classes * 4) + self._name_to_top_map['bbox_outside_weights'] = idx + idx += 1 + + print 'RoiDataLayer: name_to_top:', self._name_to_top_map + assert len(top) == len(self._name_to_top_map) + + def forward(self, bottom, top): + """Get blobs and copy them into this layer's top blob vector.""" + blobs = self._get_next_minibatch() + + for blob_name, blob in blobs.iteritems(): + top_ind = self._name_to_top_map[blob_name] + # Reshape net's input blobs + top[top_ind].reshape(*(blob.shape)) + # Copy data into net's input blobs + top[top_ind].data[...] 
= blob.astype(np.float32, copy=False) + + def backward(self, top, propagate_down, bottom): + """This layer does not propagate gradients.""" + pass + + def reshape(self, bottom, top): + """Reshaping happens during the call to forward.""" + pass + +class BlobFetcher(Process): + """Experimental class for prefetching blobs in a separate process.""" + def __init__(self, queue, roidb, num_classes): + super(BlobFetcher, self).__init__() + self._queue = queue + self._roidb = roidb + self._num_classes = num_classes + self._perm = None + self._cur = 0 + self._shuffle_roidb_inds() + # fix the random seed for reproducibility + np.random.seed(cfg.RNG_SEED) + + def _shuffle_roidb_inds(self): + """Randomly permute the training roidb.""" + # TODO(rbg): remove duplicated code + self._perm = np.random.permutation(np.arange(len(self._roidb))) + self._cur = 0 + + def _get_next_minibatch_inds(self): + """Return the roidb indices for the next minibatch.""" + # TODO(rbg): remove duplicated code + if self._cur + cfg.TRAIN.IMS_PER_BATCH >= len(self._roidb): + self._shuffle_roidb_inds() + + db_inds = self._perm[self._cur:self._cur + cfg.TRAIN.IMS_PER_BATCH] + self._cur += cfg.TRAIN.IMS_PER_BATCH + return db_inds + + def run(self): + print 'BlobFetcher started' + while True: + db_inds = self._get_next_minibatch_inds() + minibatch_db = [self._roidb[i] for i in db_inds] + blobs = get_minibatch(minibatch_db, self._num_classes) + self._queue.put(blobs) diff --git a/lib/roi_data_layer/minibatch.py b/lib/roi_data_layer/minibatch.py new file mode 100644 index 0000000..f4535b0 --- /dev/null +++ b/lib/roi_data_layer/minibatch.py @@ -0,0 +1,199 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Compute minibatch blobs for training a Fast R-CNN network.""" + +import numpy as np +import numpy.random as npr +import cv2 +from fast_rcnn.config import cfg +from utils.blob import prep_im_for_blob, im_list_to_blob + +def get_minibatch(roidb, num_classes): + """Given a roidb, construct a minibatch sampled from it.""" + num_images = len(roidb) + # Sample random scales to use for each image in this batch + random_scale_inds = npr.randint(0, high=len(cfg.TRAIN.SCALES), + size=num_images) + assert(cfg.TRAIN.BATCH_SIZE % num_images == 0), \ + 'num_images ({}) must divide BATCH_SIZE ({})'. 
\ + format(num_images, cfg.TRAIN.BATCH_SIZE) + rois_per_image = cfg.TRAIN.BATCH_SIZE / num_images + fg_rois_per_image = np.round(cfg.TRAIN.FG_FRACTION * rois_per_image) + + # Get the input image blob, formatted for caffe + im_blob, im_scales = _get_image_blob(roidb, random_scale_inds) + + blobs = {'data': im_blob} + + if cfg.TRAIN.HAS_RPN: + assert len(im_scales) == 1, "Single batch only" + assert len(roidb) == 1, "Single batch only" + # gt boxes: (x1, y1, x2, y2, cls) + gt_inds = np.where(roidb[0]['gt_classes'] != 0)[0] + gt_boxes = np.empty((len(gt_inds), 5), dtype=np.float32) + gt_boxes[:, 0:4] = roidb[0]['boxes'][gt_inds, :] * im_scales[0] + gt_boxes[:, 4] = roidb[0]['gt_classes'][gt_inds] + blobs['gt_boxes'] = gt_boxes + blobs['im_info'] = np.array( + [[im_blob.shape[2], im_blob.shape[3], im_scales[0]]], + dtype=np.float32) + else: # not using RPN + # Now, build the region of interest and label blobs + rois_blob = np.zeros((0, 5), dtype=np.float32) + labels_blob = np.zeros((0), dtype=np.float32) + bbox_targets_blob = np.zeros((0, 4 * num_classes), dtype=np.float32) + bbox_inside_blob = np.zeros(bbox_targets_blob.shape, dtype=np.float32) + # all_overlaps = [] + for im_i in xrange(num_images): + labels, overlaps, im_rois, bbox_targets, bbox_inside_weights \ + = _sample_rois(roidb[im_i], fg_rois_per_image, rois_per_image, + num_classes) + + # Add to RoIs blob + rois = _project_im_rois(im_rois, im_scales[im_i]) + batch_ind = im_i * np.ones((rois.shape[0], 1)) + rois_blob_this_image = np.hstack((batch_ind, rois)) + rois_blob = np.vstack((rois_blob, rois_blob_this_image)) + + # Add to labels, bbox targets, and bbox loss blobs + labels_blob = np.hstack((labels_blob, labels)) + bbox_targets_blob = np.vstack((bbox_targets_blob, bbox_targets)) + bbox_inside_blob = np.vstack((bbox_inside_blob, bbox_inside_weights)) + # all_overlaps = np.hstack((all_overlaps, overlaps)) + + # For debug visualizations + # _vis_minibatch(im_blob, rois_blob, labels_blob, all_overlaps) + + blobs['rois'] = rois_blob + blobs['labels'] = labels_blob + + if cfg.TRAIN.BBOX_REG: + blobs['bbox_targets'] = bbox_targets_blob + blobs['bbox_inside_weights'] = bbox_inside_blob + blobs['bbox_outside_weights'] = \ + np.array(bbox_inside_blob > 0).astype(np.float32) + + return blobs + +def _sample_rois(roidb, fg_rois_per_image, rois_per_image, num_classes): + """Generate a random sample of RoIs comprising foreground and background + examples. 
+ """ + # label = class RoI has max overlap with + labels = roidb['max_classes'] + overlaps = roidb['max_overlaps'] + rois = roidb['boxes'] + + # Select foreground RoIs as those with >= FG_THRESH overlap + fg_inds = np.where(overlaps >= cfg.TRAIN.FG_THRESH)[0] + # Guard against the case when an image has fewer than fg_rois_per_image + # foreground RoIs + fg_rois_per_this_image = np.minimum(fg_rois_per_image, fg_inds.size) + # Sample foreground regions without replacement + if fg_inds.size > 0: + fg_inds = npr.choice( + fg_inds, size=fg_rois_per_this_image, replace=False) + + # Select background RoIs as those within [BG_THRESH_LO, BG_THRESH_HI) + bg_inds = np.where((overlaps < cfg.TRAIN.BG_THRESH_HI) & + (overlaps >= cfg.TRAIN.BG_THRESH_LO))[0] + # Compute number of background RoIs to take from this image (guarding + # against there being fewer than desired) + bg_rois_per_this_image = rois_per_image - fg_rois_per_this_image + bg_rois_per_this_image = np.minimum(bg_rois_per_this_image, + bg_inds.size) + # Sample foreground regions without replacement + if bg_inds.size > 0: + bg_inds = npr.choice( + bg_inds, size=bg_rois_per_this_image, replace=False) + + # The indices that we're selecting (both fg and bg) + keep_inds = np.append(fg_inds, bg_inds) + # Select sampled values from various arrays: + labels = labels[keep_inds] + # Clamp labels for the background RoIs to 0 + labels[fg_rois_per_this_image:] = 0 + overlaps = overlaps[keep_inds] + rois = rois[keep_inds] + + bbox_targets, bbox_inside_weights = _get_bbox_regression_labels( + roidb['bbox_targets'][keep_inds, :], num_classes) + + return labels, overlaps, rois, bbox_targets, bbox_inside_weights + +def _get_image_blob(roidb, scale_inds): + """Builds an input blob from the images in the roidb at the specified + scales. + """ + num_images = len(roidb) + processed_ims = [] + im_scales = [] + for i in xrange(num_images): + im = cv2.imread(roidb[i]['image']) + if roidb[i]['flipped']: + im = im[:, ::-1, :] + target_size = cfg.TRAIN.SCALES[scale_inds[i]] + im, im_scale = prep_im_for_blob(im, cfg.PIXEL_MEANS, target_size, + cfg.TRAIN.MAX_SIZE) + im_scales.append(im_scale) + processed_ims.append(im) + + # Create a blob to hold the input images + blob = im_list_to_blob(processed_ims) + + return blob, im_scales + +def _project_im_rois(im_rois, im_scale_factor): + """Project image RoIs into the rescaled training image.""" + rois = im_rois * im_scale_factor + return rois + +def _get_bbox_regression_labels(bbox_target_data, num_classes): + """Bounding-box regression targets are stored in a compact form in the + roidb. + + This function expands those targets into the 4-of-4*K representation used + by the network (i.e. only one class has non-zero targets). The loss weights + are similarly expanded. 
+ + Returns: + bbox_target_data (ndarray): N x 4K blob of regression targets + bbox_inside_weights (ndarray): N x 4K blob of loss weights + """ + clss = bbox_target_data[:, 0] + bbox_targets = np.zeros((clss.size, 4 * num_classes), dtype=np.float32) + bbox_inside_weights = np.zeros(bbox_targets.shape, dtype=np.float32) + inds = np.where(clss > 0)[0] + for ind in inds: + cls = clss[ind] + start = 4 * cls + end = start + 4 + bbox_targets[ind, start:end] = bbox_target_data[ind, 1:] + bbox_inside_weights[ind, start:end] = cfg.TRAIN.BBOX_INSIDE_WEIGHTS + return bbox_targets, bbox_inside_weights + +def _vis_minibatch(im_blob, rois_blob, labels_blob, overlaps): + """Visualize a mini-batch for debugging.""" + import matplotlib.pyplot as plt + for i in xrange(rois_blob.shape[0]): + rois = rois_blob[i, :] + im_ind = rois[0] + roi = rois[1:] + im = im_blob[im_ind, :, :, :].transpose((1, 2, 0)).copy() + im += cfg.PIXEL_MEANS + im = im[:, :, (2, 1, 0)] + im = im.astype(np.uint8) + cls = labels_blob[i] + plt.imshow(im) + print 'class: ', cls, ' overlap: ', overlaps[i] + plt.gca().add_patch( + plt.Rectangle((roi[0], roi[1]), roi[2] - roi[0], + roi[3] - roi[1], fill=False, + edgecolor='r', linewidth=3) + ) + plt.show() diff --git a/lib/roi_data_layer/roidb.py b/lib/roi_data_layer/roidb.py new file mode 100644 index 0000000..f2bd231 --- /dev/null +++ b/lib/roi_data_layer/roidb.py @@ -0,0 +1,133 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Transform a roidb into a trainable roidb by adding a bunch of metadata.""" + +import numpy as np +from fast_rcnn.config import cfg +from fast_rcnn.bbox_transform import bbox_transform +from utils.cython_bbox import bbox_overlaps +import PIL + +def prepare_roidb(imdb): + """Enrich the imdb's roidb by adding some derived quantities that + are useful for training. This function precomputes the maximum + overlap, taken over ground-truth boxes, between each ROI and + each ground-truth box. The class with maximum overlap is also + recorded. + """ + sizes = [PIL.Image.open(imdb.image_path_at(i)).size + for i in xrange(imdb.num_images)] + roidb = imdb.roidb + for i in xrange(len(imdb.image_index)): + roidb[i]['image'] = imdb.image_path_at(i) + roidb[i]['width'] = sizes[i][0] + roidb[i]['height'] = sizes[i][1] + # need gt_overlaps as a dense array for argmax + gt_overlaps = roidb[i]['gt_overlaps'].toarray() + # max overlap with gt over classes (columns) + max_overlaps = gt_overlaps.max(axis=1) + # gt class that had the max overlap + max_classes = gt_overlaps.argmax(axis=1) + roidb[i]['max_classes'] = max_classes + roidb[i]['max_overlaps'] = max_overlaps + # sanity checks + # max overlap of 0 => class should be zero (background) + zero_inds = np.where(max_overlaps == 0)[0] + assert all(max_classes[zero_inds] == 0) + # max overlap > 0 => class should not be zero (must be a fg class) + nonzero_inds = np.where(max_overlaps > 0)[0] + assert all(max_classes[nonzero_inds] != 0) + +def add_bbox_regression_targets(roidb): + """Add information needed to train bounding-box regressors.""" + assert len(roidb) > 0 + assert 'max_classes' in roidb[0], 'Did you call prepare_roidb first?' 
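# Illustrative aside, not part of the original file: 'bbox_targets' rows are
# stored compactly as (class, tx, ty, tw, th); _get_bbox_regression_labels in
# minibatch.py later expands each row into the 4*K layout the network expects,
# where only the four columns belonging to the RoI's class are non-zero.
# A minimal standalone sketch of that expansion, with made-up numbers and K = 3:
import numpy as np

compact = np.array([[1,  0.1, 0.2, 0.0, -0.3],   # RoI assigned to class 1
                    [2, -0.5, 0.0, 0.4,  0.1]],  # RoI assigned to class 2
                   dtype=np.float32)
K = 3
expanded = np.zeros((compact.shape[0], 4 * K), dtype=np.float32)
for row in range(compact.shape[0]):
    cls = int(compact[row, 0])
    expanded[row, 4 * cls:4 * cls + 4] = compact[row, 1:]
# expanded[0, 4:8]  -> [ 0.1,  0.2,  0.0, -0.3]
# expanded[1, 8:12] -> [-0.5,  0.0,  0.4,  0.1]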
+ + num_images = len(roidb) + # Infer number of classes from the number of columns in gt_overlaps + num_classes = 2 if cfg.TRAIN.AGONISTIC else roidb[0]['gt_overlaps'].shape[1] + for im_i in xrange(num_images): + rois = roidb[im_i]['boxes'] + max_overlaps = roidb[im_i]['max_overlaps'] + max_classes = roidb[im_i]['max_classes'] + roidb[im_i]['bbox_targets'] = \ + _compute_targets(rois, max_overlaps, max_classes) + + if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED: + # Use fixed / precomputed "means" and "stds" instead of empirical values + means = np.tile( + np.array(cfg.TRAIN.BBOX_NORMALIZE_MEANS), (num_classes, 1)) + stds = np.tile( + np.array(cfg.TRAIN.BBOX_NORMALIZE_STDS), (num_classes, 1)) + else: + # Compute values needed for means and stds + # var(x) = E(x^2) - E(x)^2 + class_counts = np.zeros((num_classes, 1)) + cfg.EPS + sums = np.zeros((num_classes, 4)) + squared_sums = np.zeros((num_classes, 4)) + for im_i in xrange(num_images): + targets = roidb[im_i]['bbox_targets'] + for cls in xrange(1, num_classes): + cls_inds = np.where(targets[:, 0] == cls)[0] + if cls_inds.size > 0: + class_counts[cls] += cls_inds.size + sums[cls, :] += targets[cls_inds, 1:].sum(axis=0) + squared_sums[cls, :] += \ + (targets[cls_inds, 1:] ** 2).sum(axis=0) + + means = sums / class_counts + stds = np.sqrt(squared_sums / class_counts - means ** 2) + + print 'bbox target means:' + print means + print means[1:, :].mean(axis=0) # ignore bg class + print 'bbox target stdevs:' + print stds + print stds[1:, :].mean(axis=0) # ignore bg class + + # Normalize targets + if cfg.TRAIN.BBOX_NORMALIZE_TARGETS: + print "Normalizing targets" + for im_i in xrange(num_images): + targets = roidb[im_i]['bbox_targets'] + for cls in xrange(1, num_classes): + cls_inds = np.where(targets[:, 0] == cls)[0] + roidb[im_i]['bbox_targets'][cls_inds, 1:] -= means[cls, :] + roidb[im_i]['bbox_targets'][cls_inds, 1:] /= stds[cls, :] + else: + print "NOT normalizing targets" + + # These values will be needed for making predictions + # (the predicts will need to be unnormalized and uncentered) + return means.ravel(), stds.ravel() + +def _compute_targets(rois, overlaps, labels): + """Compute bounding-box regression targets for an image.""" + # Indices of ground-truth ROIs + gt_inds = np.where(overlaps == 1)[0] + if len(gt_inds) == 0: + # Bail if the image has no ground-truth ROIs + return np.zeros((rois.shape[0], 5), dtype=np.float32) + # Indices of examples for which we try to make predictions + ex_inds = np.where(overlaps >= cfg.TRAIN.BBOX_THRESH)[0] + + # Get IoU overlap between each ex ROI and gt ROI + ex_gt_overlaps = bbox_overlaps( + np.ascontiguousarray(rois[ex_inds, :], dtype=np.float), + np.ascontiguousarray(rois[gt_inds, :], dtype=np.float)) + + # Find which gt ROI each ex ROI has max overlap with: + # this will be the ex ROI's gt target + gt_assignment = ex_gt_overlaps.argmax(axis=1) + gt_rois = rois[gt_inds[gt_assignment], :] + ex_rois = rois[ex_inds, :] + + targets = np.zeros((rois.shape[0], 5), dtype=np.float32) + targets[ex_inds, 0] = labels[ex_inds] + targets[ex_inds, 1:] = bbox_transform(ex_rois, gt_rois) + return targets diff --git a/lib/rpn/README.md b/lib/rpn/README.md new file mode 100644 index 0000000..80abf48 --- /dev/null +++ b/lib/rpn/README.md @@ -0,0 +1,23 @@ +### `rpn` module overview + +##### `generate_anchors.py` + +Generates a regular grid of multi-scale, multi-aspect anchor boxes. + +##### `proposal_layer.py` + +Converts RPN outputs (per-anchor scores and bbox regression estimates) into object proposals. 
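A minimal sketch of the conversion this layer performs, built from the helpers added elsewhere in this diff (`generate_anchors`, `bbox_transform_inv`, `clip_boxes`, `nms`). It assumes `lib/` is on `PYTHONPATH` and the Cython/CUDA extensions are built; the feature-map size, stride, deltas, and scores below are made-up placeholders, not values the layer actually receives.

    import numpy as np
    from rpn.generate_anchors import generate_anchors
    from fast_rcnn.bbox_transform import bbox_transform_inv, clip_boxes
    from fast_rcnn.nms_wrapper import nms

    # Hypothetical feature-map geometry and image size.
    height, width, feat_stride = 4, 4, 16
    im_info = (64, 64, 1.0)  # (im_height, im_width, scale)

    # Tile the A reference anchors over every feature-map cell.
    anchors = generate_anchors()                       # (A, 4)
    shift_x, shift_y = np.meshgrid(np.arange(width) * feat_stride,
                                   np.arange(height) * feat_stride)
    shifts = np.vstack((shift_x.ravel(), shift_y.ravel(),
                        shift_x.ravel(), shift_y.ravel())).transpose()
    all_anchors = (anchors[np.newaxis] + shifts[:, np.newaxis]).reshape((-1, 4))

    # Placeholder per-anchor regression deltas and objectness scores.
    deltas = np.zeros((all_anchors.shape[0], 4), dtype=np.float32)
    scores = np.random.rand(all_anchors.shape[0], 1).astype(np.float32)

    # Deltas -> boxes, clip to the image, NMS, keep the top proposals.
    proposals = bbox_transform_inv(all_anchors, deltas)
    proposals = clip_boxes(proposals, im_info[:2])
    keep = nms(np.hstack((proposals, scores)), 0.7)
    proposals, scores = proposals[keep[:300], :], scores[keep[:300]]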
+ +##### `anchor_target_layer.py` + +Generates training targets/labels for each anchor. Classification labels are 1 (object), 0 (not object) or -1 (ignore). +Bbox regression targets are specified when the classification label is > 0. + +##### `proposal_target_layer.py` + +Generates training targets/labels for each object proposal: classification labels 0 - K (bg or object class 1, ... , K) +and bbox regression targets in that case that the label is > 0. + +##### `generate.py` + +Generate object detection proposals from an imdb using an RPN. diff --git a/lib/rpn/__init__.py b/lib/rpn/__init__.py new file mode 100644 index 0000000..23b31b2 --- /dev/null +++ b/lib/rpn/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick and Sean Bell +# -------------------------------------------------------- diff --git a/lib/rpn/anchor_target_layer.py b/lib/rpn/anchor_target_layer.py new file mode 100644 index 0000000..3934cdd --- /dev/null +++ b/lib/rpn/anchor_target_layer.py @@ -0,0 +1,281 @@ +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick and Sean Bell +# -------------------------------------------------------- + +import os +import caffe +import yaml +from fast_rcnn.config import cfg +import numpy as np +import numpy.random as npr +from generate_anchors import generate_anchors +from utils.cython_bbox import bbox_overlaps +from fast_rcnn.bbox_transform import bbox_transform + +DEBUG = False + +class AnchorTargetLayer(caffe.Layer): + """ + Assign anchors to ground-truth targets. Produces anchor classification + labels and bounding-box regression targets. 
+ """ + + def setup(self, bottom, top): + layer_params = yaml.load(self.param_str) + anchor_scales = layer_params.get('scales', (8, 16, 32)) + self._anchors = generate_anchors(scales=np.array(anchor_scales)) + self._num_anchors = self._anchors.shape[0] + self._feat_stride = layer_params['feat_stride'] + + if DEBUG: + print 'anchors:' + print self._anchors + print 'anchor shapes:' + print np.hstack(( + self._anchors[:, 2::4] - self._anchors[:, 0::4], + self._anchors[:, 3::4] - self._anchors[:, 1::4], + )) + self._counts = cfg.EPS + self._sums = np.zeros((1, 4)) + self._squared_sums = np.zeros((1, 4)) + self._fg_sum = 0 + self._bg_sum = 0 + self._count = 0 + + # allow boxes to sit over the edge by a small amount + self._allowed_border = layer_params.get('allowed_border', 0) + + height, width = bottom[0].data.shape[-2:] + if DEBUG: + print 'AnchorTargetLayer: height', height, 'width', width + + A = self._num_anchors + # labels + top[0].reshape(1, 1, A * height, width) + # bbox_targets + top[1].reshape(1, A * 4, height, width) + # bbox_inside_weights + top[2].reshape(1, A * 4, height, width) + # bbox_outside_weights + top[3].reshape(1, A * 4, height, width) + + def forward(self, bottom, top): + # Algorithm: + # + # for each (H, W) location i + # generate 9 anchor boxes centered on cell i + # apply predicted bbox deltas at cell i to each of the 9 anchors + # filter out-of-image anchors + # measure GT overlap + + assert bottom[0].data.shape[0] == 1, \ + 'Only single item batches are supported' + + # map of shape (..., H, W) + height, width = bottom[0].data.shape[-2:] + # GT boxes (x1, y1, x2, y2, label) + gt_boxes = bottom[1].data + # im_info + im_info = bottom[2].data[0, :] + + if DEBUG: + print '' + print 'im_size: ({}, {})'.format(im_info[0], im_info[1]) + print 'scale: {}'.format(im_info[2]) + print 'height, width: ({}, {})'.format(height, width) + print 'rpn: gt_boxes.shape', gt_boxes.shape + print 'rpn: gt_boxes', gt_boxes + + # 1. 
Generate proposals from bbox deltas and shifted anchors + shift_x = np.arange(0, width) * self._feat_stride + shift_y = np.arange(0, height) * self._feat_stride + shift_x, shift_y = np.meshgrid(shift_x, shift_y) + shifts = np.vstack((shift_x.ravel(), shift_y.ravel(), + shift_x.ravel(), shift_y.ravel())).transpose() + # add A anchors (1, A, 4) to + # cell K shifts (K, 1, 4) to get + # shift anchors (K, A, 4) + # reshape to (K*A, 4) shifted anchors + A = self._num_anchors + K = shifts.shape[0] + all_anchors = (self._anchors.reshape((1, A, 4)) + + shifts.reshape((1, K, 4)).transpose((1, 0, 2))) + all_anchors = all_anchors.reshape((K * A, 4)) + total_anchors = int(K * A) + + # only keep anchors inside the image + inds_inside = np.where( + (all_anchors[:, 0] >= -self._allowed_border) & + (all_anchors[:, 1] >= -self._allowed_border) & + (all_anchors[:, 2] < im_info[1] + self._allowed_border) & # width + (all_anchors[:, 3] < im_info[0] + self._allowed_border) # height + )[0] + + if DEBUG: + print 'total_anchors', total_anchors + print 'inds_inside', len(inds_inside) + + # keep only inside anchors + anchors = all_anchors[inds_inside, :] + if DEBUG: + print 'anchors.shape', anchors.shape + + # label: 1 is positive, 0 is negative, -1 is dont care + labels = np.empty((len(inds_inside), ), dtype=np.float32) + labels.fill(-1) + + # overlaps between the anchors and the gt boxes + # overlaps (ex, gt) + overlaps = bbox_overlaps( + np.ascontiguousarray(anchors, dtype=np.float), + np.ascontiguousarray(gt_boxes, dtype=np.float)) + argmax_overlaps = overlaps.argmax(axis=1) + max_overlaps = overlaps[np.arange(len(inds_inside)), argmax_overlaps] + gt_argmax_overlaps = overlaps.argmax(axis=0) + gt_max_overlaps = overlaps[gt_argmax_overlaps, + np.arange(overlaps.shape[1])] + gt_argmax_overlaps = np.where(overlaps == gt_max_overlaps)[0] + + if not cfg.TRAIN.RPN_CLOBBER_POSITIVES: + # assign bg labels first so that positive labels can clobber them + labels[max_overlaps < cfg.TRAIN.RPN_NEGATIVE_OVERLAP] = 0 + + # fg label: for each gt, anchor with highest overlap + labels[gt_argmax_overlaps] = 1 + + # fg label: above threshold IOU + labels[max_overlaps >= cfg.TRAIN.RPN_POSITIVE_OVERLAP] = 1 + + if cfg.TRAIN.RPN_CLOBBER_POSITIVES: + # assign bg labels last so that negative labels can clobber positives + labels[max_overlaps < cfg.TRAIN.RPN_NEGATIVE_OVERLAP] = 0 + + # subsample positive labels if we have too many + num_fg = int(cfg.TRAIN.RPN_FG_FRACTION * cfg.TRAIN.RPN_BATCHSIZE) + fg_inds = np.where(labels == 1)[0] + if len(fg_inds) > num_fg: + disable_inds = npr.choice( + fg_inds, size=(len(fg_inds) - num_fg), replace=False) + labels[disable_inds] = -1 + + # subsample negative labels if we have too many + num_bg = cfg.TRAIN.RPN_BATCHSIZE - np.sum(labels == 1) + bg_inds = np.where(labels == 0)[0] + if len(bg_inds) > num_bg: + disable_inds = npr.choice( + bg_inds, size=(len(bg_inds) - num_bg), replace=False) + labels[disable_inds] = -1 + #print "was %s inds, disabling %s, now %s inds" % ( + #len(bg_inds), len(disable_inds), np.sum(labels == 0)) + + bbox_targets = np.zeros((len(inds_inside), 4), dtype=np.float32) + bbox_targets = _compute_targets(anchors, gt_boxes[argmax_overlaps, :]) + + bbox_inside_weights = np.zeros((len(inds_inside), 4), dtype=np.float32) + bbox_inside_weights[labels == 1, :] = np.array(cfg.TRAIN.RPN_BBOX_INSIDE_WEIGHTS) + + bbox_outside_weights = np.zeros((len(inds_inside), 4), dtype=np.float32) + if cfg.TRAIN.RPN_POSITIVE_WEIGHT < 0: + # uniform weighting of examples (given non-uniform sampling) + 
num_examples = np.sum(labels >= 0) + positive_weights = np.ones((1, 4)) * 1.0 / num_examples + negative_weights = np.ones((1, 4)) * 1.0 / num_examples + else: + assert ((cfg.TRAIN.RPN_POSITIVE_WEIGHT > 0) & + (cfg.TRAIN.RPN_POSITIVE_WEIGHT < 1)) + positive_weights = (cfg.TRAIN.RPN_POSITIVE_WEIGHT / + np.sum(labels == 1)) + negative_weights = ((1.0 - cfg.TRAIN.RPN_POSITIVE_WEIGHT) / + np.sum(labels == 0)) + bbox_outside_weights[labels == 1, :] = positive_weights + bbox_outside_weights[labels == 0, :] = negative_weights + + if DEBUG: + self._sums += bbox_targets[labels == 1, :].sum(axis=0) + self._squared_sums += (bbox_targets[labels == 1, :] ** 2).sum(axis=0) + self._counts += np.sum(labels == 1) + means = self._sums / self._counts + stds = np.sqrt(self._squared_sums / self._counts - means ** 2) + print 'means:' + print means + print 'stdevs:' + print stds + + # map up to original set of anchors + labels = _unmap(labels, total_anchors, inds_inside, fill=-1) + bbox_targets = _unmap(bbox_targets, total_anchors, inds_inside, fill=0) + bbox_inside_weights = _unmap(bbox_inside_weights, total_anchors, inds_inside, fill=0) + bbox_outside_weights = _unmap(bbox_outside_weights, total_anchors, inds_inside, fill=0) + + if DEBUG: + print 'rpn: max max_overlap', np.max(max_overlaps) + print 'rpn: num_positive', np.sum(labels == 1) + print 'rpn: num_negative', np.sum(labels == 0) + self._fg_sum += np.sum(labels == 1) + self._bg_sum += np.sum(labels == 0) + self._count += 1 + print 'rpn: num_positive avg', self._fg_sum / self._count + print 'rpn: num_negative avg', self._bg_sum / self._count + + # labels + labels = labels.reshape((1, height, width, A)).transpose(0, 3, 1, 2) + labels = labels.reshape((1, 1, A * height, width)) + top[0].reshape(*labels.shape) + top[0].data[...] = labels + + # bbox_targets + bbox_targets = bbox_targets \ + .reshape((1, height, width, A * 4)).transpose(0, 3, 1, 2) + top[1].reshape(*bbox_targets.shape) + top[1].data[...] = bbox_targets + + # bbox_inside_weights + bbox_inside_weights = bbox_inside_weights \ + .reshape((1, height, width, A * 4)).transpose(0, 3, 1, 2) + assert bbox_inside_weights.shape[2] == height + assert bbox_inside_weights.shape[3] == width + top[2].reshape(*bbox_inside_weights.shape) + top[2].data[...] = bbox_inside_weights + + # bbox_outside_weights + bbox_outside_weights = bbox_outside_weights \ + .reshape((1, height, width, A * 4)).transpose(0, 3, 1, 2) + assert bbox_outside_weights.shape[2] == height + assert bbox_outside_weights.shape[3] == width + top[3].reshape(*bbox_outside_weights.shape) + top[3].data[...] 
= bbox_outside_weights + + def backward(self, top, propagate_down, bottom): + """This layer does not propagate gradients.""" + pass + + def reshape(self, bottom, top): + """Reshaping happens during the call to forward.""" + pass + + +def _unmap(data, count, inds, fill=0): + """ Unmap a subset of item (data) back to the original set of items (of + size count) """ + if len(data.shape) == 1: + ret = np.empty((count, ), dtype=np.float32) + ret.fill(fill) + ret[inds] = data + else: + ret = np.empty((count, ) + data.shape[1:], dtype=np.float32) + ret.fill(fill) + ret[inds, :] = data + return ret + + +def _compute_targets(ex_rois, gt_rois): + """Compute bounding-box regression targets for an image.""" + + assert ex_rois.shape[0] == gt_rois.shape[0] + assert ex_rois.shape[1] == 4 + assert gt_rois.shape[1] == 5 + + return bbox_transform(ex_rois, gt_rois[:, :4]).astype(np.float32, copy=False) diff --git a/lib/rpn/generate.py b/lib/rpn/generate.py new file mode 100644 index 0000000..060daf4 --- /dev/null +++ b/lib/rpn/generate.py @@ -0,0 +1,117 @@ +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +from fast_rcnn.config import cfg +from utils.blob import im_list_to_blob +from utils.timer import Timer +import numpy as np +import cv2 + +def _vis_proposals(im, dets, thresh=0.5): + """Draw detected bounding boxes.""" + inds = np.where(dets[:, -1] >= thresh)[0] + if len(inds) == 0: + return + + class_name = 'obj' + im = im[:, :, (2, 1, 0)] + fig, ax = plt.subplots(figsize=(12, 12)) + ax.imshow(im, aspect='equal') + for i in inds: + bbox = dets[i, :4] + score = dets[i, -1] + + ax.add_patch( + plt.Rectangle((bbox[0], bbox[1]), + bbox[2] - bbox[0], + bbox[3] - bbox[1], fill=False, + edgecolor='red', linewidth=3.5) + ) + ax.text(bbox[0], bbox[1] - 2, + '{:s} {:.3f}'.format(class_name, score), + bbox=dict(facecolor='blue', alpha=0.5), + fontsize=14, color='white') + + ax.set_title(('{} detections with ' + 'p({} | box) >= {:.1f}').format(class_name, class_name, + thresh), + fontsize=14) + plt.axis('off') + plt.tight_layout() + plt.draw() + +def _get_image_blob(im): + """Converts an image into a network input. 
+ + Arguments: + im (ndarray): a color image in BGR order + + Returns: + blob (ndarray): a data blob holding an image pyramid + im_scale_factors (list): list of image scales (relative to im) used + in the image pyramid + """ + im_orig = im.astype(np.float32, copy=True) + im_orig -= cfg.PIXEL_MEANS + + im_shape = im_orig.shape + im_size_min = np.min(im_shape[0:2]) + im_size_max = np.max(im_shape[0:2]) + + processed_ims = [] + + assert len(cfg.TEST.SCALES) == 1 + target_size = cfg.TEST.SCALES[0] + + im_scale = float(target_size) / float(im_size_min) + # Prevent the biggest axis from being more than MAX_SIZE + if np.round(im_scale * im_size_max) > cfg.TEST.MAX_SIZE: + im_scale = float(cfg.TEST.MAX_SIZE) / float(im_size_max) + im = cv2.resize(im_orig, None, None, fx=im_scale, fy=im_scale, + interpolation=cv2.INTER_LINEAR) + im_info = np.hstack((im.shape[:2], im_scale))[np.newaxis, :] + processed_ims.append(im) + + # Create a blob to hold the input images + blob = im_list_to_blob(processed_ims) + + return blob, im_info + +def im_proposals(net, im): + """Generate RPN proposals on a single image.""" + blobs = {} + blobs['data'], blobs['im_info'] = _get_image_blob(im) + net.blobs['data'].reshape(*(blobs['data'].shape)) + net.blobs['im_info'].reshape(*(blobs['im_info'].shape)) + blobs_out = net.forward( + data=blobs['data'].astype(np.float32, copy=False), + im_info=blobs['im_info'].astype(np.float32, copy=False)) + + scale = blobs['im_info'][0, 2] + boxes = blobs_out['rois'][:, 1:].copy() / scale + scores = blobs_out['scores'].copy() + return boxes, scores + +def imdb_proposals(net, imdb): + """Generate RPN proposals on all images in an imdb.""" + + _t = Timer() + imdb_boxes = [[] for _ in xrange(imdb.num_images)] + for i in xrange(imdb.num_images): + im = cv2.imread(imdb.image_path_at(i)) + _t.tic() + imdb_boxes[i], scores = im_proposals(net, im) + _t.toc() + print 'im_proposals: {:d}/{:d} {:.3f}s' \ + .format(i + 1, imdb.num_images, _t.average_time) + if 0: + dets = np.hstack((imdb_boxes[i], scores)) + # from IPython import embed; embed() + _vis_proposals(im, dets[:3, :], thresh=0.9) + plt.show() + + return imdb_boxes diff --git a/lib/rpn/generate_anchors.py b/lib/rpn/generate_anchors.py new file mode 100644 index 0000000..1125a80 --- /dev/null +++ b/lib/rpn/generate_anchors.py @@ -0,0 +1,105 @@ +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick and Sean Bell +# -------------------------------------------------------- + +import numpy as np + +# Verify that we compute the same anchors as Shaoqing's matlab implementation: +# +# >> load output/rpn_cachedir/faster_rcnn_VOC2007_ZF_stage1_rpn/anchors.mat +# >> anchors +# +# anchors = +# +# -83 -39 100 56 +# -175 -87 192 104 +# -359 -183 376 200 +# -55 -55 72 72 +# -119 -119 136 136 +# -247 -247 264 264 +# -35 -79 52 96 +# -79 -167 96 184 +# -167 -343 184 360 + +#array([[ -83., -39., 100., 56.], +# [-175., -87., 192., 104.], +# [-359., -183., 376., 200.], +# [ -55., -55., 72., 72.], +# [-119., -119., 136., 136.], +# [-247., -247., 264., 264.], +# [ -35., -79., 52., 96.], +# [ -79., -167., 96., 184.], +# [-167., -343., 184., 360.]]) + +def generate_anchors(base_size=16, ratios=[0.5, 1, 2], + scales=2**np.arange(3, 6)): + """ + Generate anchor (reference) windows by enumerating aspect ratios X + scales wrt a reference (0, 0, 15, 15) window. 
+ """ + + base_anchor = np.array([1, 1, base_size, base_size]) - 1 + ratio_anchors = _ratio_enum(base_anchor, ratios) + anchors = np.vstack([_scale_enum(ratio_anchors[i, :], scales) + for i in xrange(ratio_anchors.shape[0])]) + return anchors + +def _whctrs(anchor): + """ + Return width, height, x center, and y center for an anchor (window). + """ + + w = anchor[2] - anchor[0] + 1 + h = anchor[3] - anchor[1] + 1 + x_ctr = anchor[0] + 0.5 * (w - 1) + y_ctr = anchor[1] + 0.5 * (h - 1) + return w, h, x_ctr, y_ctr + +def _mkanchors(ws, hs, x_ctr, y_ctr): + """ + Given a vector of widths (ws) and heights (hs) around a center + (x_ctr, y_ctr), output a set of anchors (windows). + """ + + ws = ws[:, np.newaxis] + hs = hs[:, np.newaxis] + anchors = np.hstack((x_ctr - 0.5 * (ws - 1), + y_ctr - 0.5 * (hs - 1), + x_ctr + 0.5 * (ws - 1), + y_ctr + 0.5 * (hs - 1))) + return anchors + +def _ratio_enum(anchor, ratios): + """ + Enumerate a set of anchors for each aspect ratio wrt an anchor. + """ + + w, h, x_ctr, y_ctr = _whctrs(anchor) + size = w * h + size_ratios = size / ratios + ws = np.round(np.sqrt(size_ratios)) + hs = np.round(ws * ratios) + anchors = _mkanchors(ws, hs, x_ctr, y_ctr) + return anchors + +def _scale_enum(anchor, scales): + """ + Enumerate a set of anchors for each scale wrt an anchor. + """ + + w, h, x_ctr, y_ctr = _whctrs(anchor) + ws = w * scales + hs = h * scales + anchors = _mkanchors(ws, hs, x_ctr, y_ctr) + return anchors + +if __name__ == '__main__': + import time + t = time.time() + a = generate_anchors() + print time.time() - t + print a + from IPython import embed; embed() diff --git a/lib/rpn/proposal_layer.py b/lib/rpn/proposal_layer.py new file mode 100644 index 0000000..24b9db7 --- /dev/null +++ b/lib/rpn/proposal_layer.py @@ -0,0 +1,177 @@ +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick and Sean Bell +# -------------------------------------------------------- + +import caffe +import numpy as np +import yaml +from fast_rcnn.config import cfg +from generate_anchors import generate_anchors +from fast_rcnn.bbox_transform import bbox_transform_inv, clip_boxes +from fast_rcnn.nms_wrapper import nms + +DEBUG = False + +class ProposalLayer(caffe.Layer): + """ + Outputs object detection proposals by applying estimated bounding-box + transformations to a set of regular boxes (called "anchors"). 
+ """ + + def setup(self, bottom, top): + # parse the layer parameter string, which must be valid YAML + layer_params = yaml.load(self.param_str) + + self._feat_stride = layer_params['feat_stride'] + anchor_scales = layer_params.get('scales', (8, 16, 32)) + self._anchors = generate_anchors(scales=np.array(anchor_scales)) + self._num_anchors = self._anchors.shape[0] + + if DEBUG: + print 'feat_stride: {}'.format(self._feat_stride) + print 'anchors:' + print self._anchors + + # rois blob: holds R regions of interest, each is a 5-tuple + # (n, x1, y1, x2, y2) specifying an image batch index n and a + # rectangle (x1, y1, x2, y2) + top[0].reshape(1, 5) + + # scores blob: holds scores for R regions of interest + if len(top) > 1: + top[1].reshape(1, 1, 1, 1) + + def forward(self, bottom, top): + # Algorithm: + # + # for each (H, W) location i + # generate A anchor boxes centered on cell i + # apply predicted bbox deltas at cell i to each of the A anchors + # clip predicted boxes to image + # remove predicted boxes with either height or width < threshold + # sort all (proposal, score) pairs by score from highest to lowest + # take top pre_nms_topN proposals before NMS + # apply NMS with threshold 0.7 to remaining proposals + # take after_nms_topN proposals after NMS + # return the top proposals (-> RoIs top, scores top) + + assert bottom[0].data.shape[0] == 1, \ + 'Only single item batches are supported' + + cfg_key = str('TRAIN' if self.phase == 0 else 'TEST') # either 'TRAIN' or 'TEST' + pre_nms_topN = cfg[cfg_key].RPN_PRE_NMS_TOP_N + post_nms_topN = cfg[cfg_key].RPN_POST_NMS_TOP_N + nms_thresh = cfg[cfg_key].RPN_NMS_THRESH + min_size = cfg[cfg_key].RPN_MIN_SIZE + + # the first set of _num_anchors channels are bg probs + # the second set are the fg probs, which we want + scores = bottom[0].data[:, self._num_anchors:, :, :] + bbox_deltas = bottom[1].data + im_info = bottom[2].data[0, :] + + if DEBUG: + print 'im_size: ({}, {})'.format(im_info[0], im_info[1]) + print 'scale: {}'.format(im_info[2]) + + # 1. 
Generate proposals from bbox deltas and shifted anchors + height, width = scores.shape[-2:] + + if DEBUG: + print 'score map size: {}'.format(scores.shape) + + # Enumerate all shifts + shift_x = np.arange(0, width) * self._feat_stride + shift_y = np.arange(0, height) * self._feat_stride + shift_x, shift_y = np.meshgrid(shift_x, shift_y) + shifts = np.vstack((shift_x.ravel(), shift_y.ravel(), + shift_x.ravel(), shift_y.ravel())).transpose() + + # Enumerate all shifted anchors: + # + # add A anchors (1, A, 4) to + # cell K shifts (K, 1, 4) to get + # shift anchors (K, A, 4) + # reshape to (K*A, 4) shifted anchors + A = self._num_anchors + K = shifts.shape[0] + anchors = self._anchors.reshape((1, A, 4)) + \ + shifts.reshape((1, K, 4)).transpose((1, 0, 2)) + anchors = anchors.reshape((K * A, 4)) + + # Transpose and reshape predicted bbox transformations to get them + # into the same order as the anchors: + # + # bbox deltas will be (1, 4 * A, H, W) format + # transpose to (1, H, W, 4 * A) + # reshape to (1 * H * W * A, 4) where rows are ordered by (h, w, a) + # in slowest to fastest order + bbox_deltas = bbox_deltas.transpose((0, 2, 3, 1)).reshape((-1, 4)) + + # Same story for the scores: + # + # scores are (1, A, H, W) format + # transpose to (1, H, W, A) + # reshape to (1 * H * W * A, 1) where rows are ordered by (h, w, a) + scores = scores.transpose((0, 2, 3, 1)).reshape((-1, 1)) + + # Convert anchors into proposals via bbox transformations + proposals = bbox_transform_inv(anchors, bbox_deltas) + + # 2. clip predicted boxes to image + proposals = clip_boxes(proposals, im_info[:2]) + + # 3. remove predicted boxes with either height or width < threshold + # (NOTE: convert min_size to input image scale stored in im_info[2]) + keep = _filter_boxes(proposals, min_size * im_info[2]) + proposals = proposals[keep, :] + scores = scores[keep] + + # 4. sort all (proposal, score) pairs by score from highest to lowest + # 5. take top pre_nms_topN (e.g. 6000) + order = scores.ravel().argsort()[::-1] + if pre_nms_topN > 0: + order = order[:pre_nms_topN] + proposals = proposals[order, :] + scores = scores[order] + + # 6. apply nms (e.g. threshold = 0.7) + # 7. take after_nms_topN (e.g. 300) + # 8. return the top proposals (-> RoIs top) + keep = nms(np.hstack((proposals, scores)), nms_thresh) + if post_nms_topN > 0: + keep = keep[:post_nms_topN] + proposals = proposals[keep, :] + scores = scores[keep] + + # Output rois blob + # Our RPN implementation only supports a single input image, so all + # batch inds are 0 + batch_inds = np.zeros((proposals.shape[0], 1), dtype=np.float32) + blob = np.hstack((batch_inds, proposals.astype(np.float32, copy=False))) + # print blob.shape + top[0].reshape(*(blob.shape)) + top[0].data[...] = blob + + # [Optional] output scores blob + if len(top) > 1: + top[1].reshape(*(scores.shape)) + top[1].data[...] 
= scores + + def backward(self, top, propagate_down, bottom): + """This layer does not propagate gradients.""" + pass + + def reshape(self, bottom, top): + """Reshaping happens during the call to forward.""" + pass + +def _filter_boxes(boxes, min_size): + """Remove all boxes with any side smaller than min_size.""" + ws = boxes[:, 2] - boxes[:, 0] + 1 + hs = boxes[:, 3] - boxes[:, 1] + 1 + keep = np.where((ws >= min_size) & (hs >= min_size))[0] + return keep diff --git a/lib/rpn/proposal_target_layer.py b/lib/rpn/proposal_target_layer.py new file mode 100644 index 0000000..0bf2f5a --- /dev/null +++ b/lib/rpn/proposal_target_layer.py @@ -0,0 +1,216 @@ +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick and Sean Bell +# -------------------------------------------------------- + +import caffe +import yaml +import numpy as np +import numpy.random as npr +from fast_rcnn.config import cfg +from fast_rcnn.bbox_transform import bbox_transform +from utils.cython_bbox import bbox_overlaps + +DEBUG = False + +class ProposalTargetLayer(caffe.Layer): + """ + Assign object detection proposals to ground-truth targets. Produces proposal + classification labels and bounding-box regression targets. + """ + + def setup(self, bottom, top): + layer_params = yaml.load(self.param_str) + self._num_classes = layer_params['num_classes'] + + # sampled rois (0, x1, y1, x2, y2) + top[0].reshape(1, 5, 1, 1) + # labels + top[1].reshape(1, 1, 1, 1) + # bbox_targets + top[2].reshape(1, self._num_classes * 4, 1, 1) + # bbox_inside_weights + top[3].reshape(1, self._num_classes * 4, 1, 1) + # bbox_outside_weights + top[4].reshape(1, self._num_classes * 4, 1, 1) + + def forward(self, bottom, top): + # Proposal ROIs (0, x1, y1, x2, y2) coming from RPN + # (i.e., rpn.proposal_layer.ProposalLayer), or any other source + all_rois = bottom[0].data + # GT boxes (x1, y1, x2, y2, label) + # TODO(rbg): it's annoying that sometimes I have extra info before + # and other times after box coordinates -- normalize to one format + gt_boxes = bottom[1].data + + # Include ground-truth boxes in the set of candidate rois + zeros = np.zeros((gt_boxes.shape[0], 1), dtype=gt_boxes.dtype) + all_rois = np.vstack( + (all_rois, np.hstack((zeros, gt_boxes[:, :-1]))) + ) + + # Sanity check: single batch only + assert np.all(all_rois[:, 0] == 0), \ + 'Only single item batches are supported' + + rois_per_image = np.inf if cfg.TRAIN.BATCH_SIZE == -1 else cfg.TRAIN.BATCH_SIZE + fg_rois_per_image = np.round(cfg.TRAIN.FG_FRACTION * rois_per_image) + + # Sample rois with classification labels and bounding box regression + # targets + # print 'proposal_target_layer:', fg_rois_per_image + labels, rois, bbox_targets, bbox_inside_weights = _sample_rois( + all_rois, gt_boxes, fg_rois_per_image, + rois_per_image, self._num_classes) + + if DEBUG: + print 'num fg: {}'.format((labels > 0).sum()) + print 'num bg: {}'.format((labels == 0).sum()) + self._count += 1 + self._fg_num += (labels > 0).sum() + self._bg_num += (labels == 0).sum() + print 'num fg avg: {}'.format(self._fg_num / self._count) + print 'num bg avg: {}'.format(self._bg_num / self._count) + print 'ratio: {:.3f}'.format(float(self._fg_num) / float(self._bg_num)) + + # sampled rois + # modified by ywxiong + rois = rois.reshape((rois.shape[0], rois.shape[1], 1, 1)) + top[0].reshape(*rois.shape) + top[0].data[...] 
= rois + + # classification labels + # modified by ywxiong + labels = labels.reshape((labels.shape[0], 1, 1, 1)) + top[1].reshape(*labels.shape) + top[1].data[...] = labels + + # bbox_targets + # modified by ywxiong + bbox_targets = bbox_targets.reshape((bbox_targets.shape[0], bbox_targets.shape[1], 1, 1)) + top[2].reshape(*bbox_targets.shape) + top[2].data[...] = bbox_targets + + # bbox_inside_weights + # modified by ywxiong + bbox_inside_weights = bbox_inside_weights.reshape((bbox_inside_weights.shape[0], bbox_inside_weights.shape[1], 1, 1)) + top[3].reshape(*bbox_inside_weights.shape) + top[3].data[...] = bbox_inside_weights + + # bbox_outside_weights + # modified by ywxiong + bbox_inside_weights = bbox_inside_weights.reshape((bbox_inside_weights.shape[0], bbox_inside_weights.shape[1], 1, 1)) + top[4].reshape(*bbox_inside_weights.shape) + top[4].data[...] = np.array(bbox_inside_weights > 0).astype(np.float32) + + def backward(self, top, propagate_down, bottom): + """This layer does not propagate gradients.""" + pass + + def reshape(self, bottom, top): + """Reshaping happens during the call to forward.""" + pass + + +def _get_bbox_regression_labels(bbox_target_data, num_classes): + """Bounding-box regression targets (bbox_target_data) are stored in a + compact form N x (class, tx, ty, tw, th) + + This function expands those targets into the 4-of-4*K representation used + by the network (i.e. only one class has non-zero targets). + + Returns: + bbox_target (ndarray): N x 4K blob of regression targets + bbox_inside_weights (ndarray): N x 4K blob of loss weights + """ + + clss = bbox_target_data[:, 0] + bbox_targets = np.zeros((clss.size, 4 * num_classes), dtype=np.float32) + # print 'proposal_target_layer:', bbox_targets.shape + bbox_inside_weights = np.zeros(bbox_targets.shape, dtype=np.float32) + inds = np.where(clss > 0)[0] + if cfg.TRAIN.AGONISTIC: + for ind in inds: + cls = clss[ind] + start = 4 * (1 if cls > 0 else 0) + end = start + 4 + bbox_targets[ind, start:end] = bbox_target_data[ind, 1:] + bbox_inside_weights[ind, start:end] = cfg.TRAIN.BBOX_INSIDE_WEIGHTS + else: + for ind in inds: + cls = clss[ind] + start = 4 * cls + end = start + 4 + bbox_targets[ind, start:end] = bbox_target_data[ind, 1:] + bbox_inside_weights[ind, start:end] = cfg.TRAIN.BBOX_INSIDE_WEIGHTS + return bbox_targets, bbox_inside_weights + + +def _compute_targets(ex_rois, gt_rois, labels): + """Compute bounding-box regression targets for an image.""" + + assert ex_rois.shape[0] == gt_rois.shape[0] + assert ex_rois.shape[1] == 4 + assert gt_rois.shape[1] == 4 + + targets = bbox_transform(ex_rois, gt_rois) + if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED: + # Optionally normalize targets by a precomputed mean and stdev + targets = ((targets - np.array(cfg.TRAIN.BBOX_NORMALIZE_MEANS)) + / np.array(cfg.TRAIN.BBOX_NORMALIZE_STDS)) + return np.hstack( + (labels[:, np.newaxis], targets)).astype(np.float32, copy=False) + +def _sample_rois(all_rois, gt_boxes, fg_rois_per_image, rois_per_image, num_classes): + """Generate a random sample of RoIs comprising foreground and background + examples. 
+ """ + # overlaps: (rois x gt_boxes) + overlaps = bbox_overlaps( + np.ascontiguousarray(all_rois[:, 1:5], dtype=np.float), + np.ascontiguousarray(gt_boxes[:, :4], dtype=np.float)) + gt_assignment = overlaps.argmax(axis=1) + max_overlaps = overlaps.max(axis=1) + labels = gt_boxes[gt_assignment, 4] + + # Select foreground RoIs as those with >= FG_THRESH overlap + fg_inds = np.where(max_overlaps >= cfg.TRAIN.FG_THRESH)[0] + # Guard against the case when an image has fewer than fg_rois_per_image + # foreground RoIs + fg_rois_per_this_image = min(fg_rois_per_image, fg_inds.size) + # Sample foreground regions without replacement + if fg_inds.size > 0: + fg_inds = npr.choice(fg_inds, size=fg_rois_per_this_image, replace=False) + + # Select background RoIs as those within [BG_THRESH_LO, BG_THRESH_HI) + bg_inds = np.where((max_overlaps < cfg.TRAIN.BG_THRESH_HI) & + (max_overlaps >= cfg.TRAIN.BG_THRESH_LO))[0] + # Compute number of background RoIs to take from this image (guarding + # against there being fewer than desired) + bg_rois_per_this_image = rois_per_image - fg_rois_per_this_image + bg_rois_per_this_image = min(bg_rois_per_this_image, bg_inds.size) + # Sample background regions without replacement + if bg_inds.size > 0: + bg_inds = npr.choice(bg_inds, size=bg_rois_per_this_image, replace=False) + + # The indices that we're selecting (both fg and bg) + keep_inds = np.append(fg_inds, bg_inds) + # print 'proposal_target_layer:', keep_inds + + # Select sampled values from various arrays: + labels = labels[keep_inds] + # Clamp labels for the background RoIs to 0 + labels[fg_rois_per_this_image:] = 0 + rois = all_rois[keep_inds] + + # print 'proposal_target_layer:', rois + bbox_target_data = _compute_targets( + rois[:, 1:5], gt_boxes[gt_assignment[keep_inds], :4], labels) + + # print 'proposal_target_layer:', bbox_target_data + bbox_targets, bbox_inside_weights = \ + _get_bbox_regression_labels(bbox_target_data, num_classes) + + return labels, rois, bbox_targets, bbox_inside_weights diff --git a/lib/setup.py b/lib/setup.py new file mode 100644 index 0000000..0f4615f --- /dev/null +++ b/lib/setup.py @@ -0,0 +1,156 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import os +from os.path import join as pjoin +from setuptools import setup +from distutils.extension import Extension +from Cython.Distutils import build_ext +import subprocess +import numpy as np + +def find_in_path(name, path): + "Find a file in a search path" + # Adapted fom + # http://code.activestate.com/recipes/52224-find-a-file-given-a-search-path/ + for dir in path.split(os.pathsep): + binpath = pjoin(dir, name) + if os.path.exists(binpath): + return os.path.abspath(binpath) + return None + + +def locate_cuda(): + """Locate the CUDA environment on the system + + Returns a dict with keys 'home', 'nvcc', 'include', and 'lib64' + and values giving the absolute path to each directory. + + Starts by looking for the CUDAHOME env variable. If not found, everything + is based on finding 'nvcc' in the PATH. 
+ """ + + # first check if the CUDAHOME env variable is in use + if 'CUDAHOME' in os.environ: + home = os.environ['CUDAHOME'] + nvcc = pjoin(home, 'bin', 'nvcc') + else: + # otherwise, search the PATH for NVCC + default_path = pjoin(os.sep, 'usr', 'local', 'cuda', 'bin') + nvcc = find_in_path('nvcc', os.environ['PATH'] + os.pathsep + default_path) + if nvcc is None: + raise EnvironmentError('The nvcc binary could not be ' + 'located in your $PATH. Either add it to your path, or set $CUDAHOME') + home = os.path.dirname(os.path.dirname(nvcc)) + + cudaconfig = {'home':home, 'nvcc':nvcc, + 'include': pjoin(home, 'include'), + 'lib64': pjoin(home, 'lib64')} + for k, v in cudaconfig.iteritems(): + if not os.path.exists(v): + raise EnvironmentError('The CUDA %s path could not be located in %s' % (k, v)) + + return cudaconfig +CUDA = locate_cuda() + + +# Obtain the numpy include directory. This logic works across numpy versions. +try: + numpy_include = np.get_include() +except AttributeError: + numpy_include = np.get_numpy_include() + +def customize_compiler_for_nvcc(self): + """inject deep into distutils to customize how the dispatch + to gcc/nvcc works. + + If you subclass UnixCCompiler, it's not trivial to get your subclass + injected in, and still have the right customizations (i.e. + distutils.sysconfig.customize_compiler) run on it. So instead of going + the OO route, I have this. Note, it's kindof like a wierd functional + subclassing going on.""" + + # tell the compiler it can processes .cu + self.src_extensions.append('.cu') + + # save references to the default compiler_so and _comple methods + default_compiler_so = self.compiler_so + super = self._compile + + # now redefine the _compile method. This gets executed for each + # object but distutils doesn't have the ability to change compilers + # based on source extension: we add it. 
+ def _compile(obj, src, ext, cc_args, extra_postargs, pp_opts): + if os.path.splitext(src)[1] == '.cu': + # use the cuda for .cu files + self.set_executable('compiler_so', CUDA['nvcc']) + # use only a subset of the extra_postargs, which are 1-1 translated + # from the extra_compile_args in the Extension class + postargs = extra_postargs['nvcc'] + else: + postargs = extra_postargs['gcc'] + + super(obj, src, ext, cc_args, postargs, pp_opts) + # reset the default compiler_so, which we might have changed for cuda + self.compiler_so = default_compiler_so + + # inject our redefined _compile method into the class + self._compile = _compile + + +# run the customize_compiler +class custom_build_ext(build_ext): + def build_extensions(self): + customize_compiler_for_nvcc(self.compiler) + build_ext.build_extensions(self) + + +ext_modules = [ + Extension( + "utils.cython_bbox", + ["utils/bbox.pyx"], + extra_compile_args={'gcc': ["-Wno-cpp", "-Wno-unused-function"]}, + include_dirs = [numpy_include] + ), + Extension( + "nms.cpu_nms", + ["nms/cpu_nms.pyx"], + extra_compile_args={'gcc': ["-Wno-cpp", "-Wno-unused-function"]}, + include_dirs = [numpy_include] + ), + Extension('nms.gpu_nms', + ['nms/nms_kernel.cu', 'nms/gpu_nms.pyx'], + library_dirs=[CUDA['lib64']], + libraries=['cudart'], + language='c++', + runtime_library_dirs=[CUDA['lib64']], + # this syntax is specific to this build system + # we're only going to use certain compiler args with nvcc and not with + # gcc the implementation of this trick is in customize_compiler() below + extra_compile_args={'gcc': ["-Wno-unused-function"], + 'nvcc': ['-arch=sm_35', + '--ptxas-options=-v', + '-c', + '--compiler-options', + "'-fPIC'"]}, + include_dirs = [numpy_include, CUDA['include']] + ), + Extension( + 'pycocotools._mask', + sources=['pycocotools/maskApi.c', 'pycocotools/_mask.pyx'], + include_dirs = [numpy_include, 'pycocotools'], + extra_compile_args={ + 'gcc': ['-Wno-cpp', '-Wno-unused-function', '-std=c99']}, + ), +] + +setup( + name='fast_rcnn', + ext_modules=ext_modules, + # inject our custom trigger + cmdclass={'build_ext': custom_build_ext}, +) diff --git a/lib/transform/__init__.py b/lib/transform/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lib/transform/torch_image_transform_layer.py b/lib/transform/torch_image_transform_layer.py new file mode 100644 index 0000000..9273b3a --- /dev/null +++ b/lib/transform/torch_image_transform_layer.py @@ -0,0 +1,64 @@ +# -------------------------------------------------------- +# Fast/er R-CNN +# Licensed under The MIT License [see LICENSE for details] +# -------------------------------------------------------- + +""" Transform images for compatibility with models trained with +https://github.com/facebook/fb.resnet.torch. 
+ +Usage in model prototxt: + +layer { + name: 'data_xform' + type: 'Python' + bottom: 'data_caffe' + top: 'data' + python_param { + module: 'transform.torch_image_transform_layer' + layer: 'TorchImageTransformLayer' + } +} +""" + +import caffe +from fast_rcnn.config import cfg +import numpy as np + +class TorchImageTransformLayer(caffe.Layer): + def setup(self, bottom, top): + # (1, 3, 1, 1) shaped arrays + self.PIXEL_MEANS = \ + np.array([[[[0.48462227599918]], + [[0.45624044862054]], + [[0.40588363755159]]]]) + self.PIXEL_STDS = \ + np.array([[[[0.22889466674951]], + [[0.22446679341259]], + [[0.22495548344775]]]]) + # The default ("old") pixel means that were already subtracted + channel_swap = (0, 3, 1, 2) + self.OLD_PIXEL_MEANS = \ + cfg.PIXEL_MEANS[np.newaxis, :, :, :].transpose(channel_swap) + + top[0].reshape(*(bottom[0].shape)) + + def forward(self, bottom, top): + ims = bottom[0].data + # Invert the channel means that were already subtracted + ims += self.OLD_PIXEL_MEANS + # 1. Permute BGR to RGB and normalize to [0, 1] + ims = ims[:, [2, 1, 0], :, :] / 255.0 + # 2. Remove channel means + ims -= self.PIXEL_MEANS + # 3. Standardize channels + ims /= self.PIXEL_STDS + top[0].reshape(*(ims.shape)) + top[0].data[...] = ims + + def backward(self, top, propagate_down, bottom): + """This layer does not propagate gradients.""" + pass + + def reshape(self, bottom, top): + """Reshaping happens during the call to forward.""" + pass diff --git a/lib/utils/.gitignore b/lib/utils/.gitignore new file mode 100644 index 0000000..4b8a745 --- /dev/null +++ b/lib/utils/.gitignore @@ -0,0 +1,2 @@ +*.c +*.so diff --git a/lib/utils/__init__.py b/lib/utils/__init__.py new file mode 100644 index 0000000..7ba6a65 --- /dev/null +++ b/lib/utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- diff --git a/lib/utils/bbox.pyx b/lib/utils/bbox.pyx new file mode 100644 index 0000000..e14780d --- /dev/null +++ b/lib/utils/bbox.pyx @@ -0,0 +1,55 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Sergey Karayev +# -------------------------------------------------------- + +cimport cython +import numpy as np +cimport numpy as np + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + +def bbox_overlaps( + np.ndarray[DTYPE_t, ndim=2] boxes, + np.ndarray[DTYPE_t, ndim=2] query_boxes): + """ + Parameters + ---------- + boxes: (N, 4) ndarray of float + query_boxes: (K, 4) ndarray of float + Returns + ------- + overlaps: (N, K) ndarray of overlap between boxes and query_boxes + """ + cdef unsigned int N = boxes.shape[0] + cdef unsigned int K = query_boxes.shape[0] + cdef np.ndarray[DTYPE_t, ndim=2] overlaps = np.zeros((N, K), dtype=DTYPE) + cdef DTYPE_t iw, ih, box_area + cdef DTYPE_t ua + cdef unsigned int k, n + for k in range(K): + box_area = ( + (query_boxes[k, 2] - query_boxes[k, 0] + 1) * + (query_boxes[k, 3] - query_boxes[k, 1] + 1) + ) + for n in range(N): + iw = ( + min(boxes[n, 2], query_boxes[k, 2]) - + max(boxes[n, 0], query_boxes[k, 0]) + 1 + ) + if iw > 0: + ih = ( + min(boxes[n, 3], query_boxes[k, 3]) - + max(boxes[n, 1], query_boxes[k, 1]) + 1 + ) + if ih > 0: + ua = float( + (boxes[n, 2] - boxes[n, 0] + 1) * + (boxes[n, 3] - boxes[n, 1] + 1) + + 
box_area - iw * ih + ) + overlaps[n, k] = iw * ih / ua + return overlaps diff --git a/lib/utils/blob.py b/lib/utils/blob.py new file mode 100644 index 0000000..1c31642 --- /dev/null +++ b/lib/utils/blob.py @@ -0,0 +1,45 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Blob helper functions.""" + +import numpy as np +import cv2 + +def im_list_to_blob(ims): + """Convert a list of images into a network input. + + Assumes images are already prepared (means subtracted, BGR order, ...). + """ + max_shape = np.array([im.shape for im in ims]).max(axis=0) + num_images = len(ims) + blob = np.zeros((num_images, max_shape[0], max_shape[1], 3), + dtype=np.float32) + for i in xrange(num_images): + im = ims[i] + blob[i, 0:im.shape[0], 0:im.shape[1], :] = im + # Move channels (axis 3) to axis 1 + # Axis order will become: (batch elem, channel, height, width) + channel_swap = (0, 3, 1, 2) + blob = blob.transpose(channel_swap) + return blob + +def prep_im_for_blob(im, pixel_means, target_size, max_size): + """Mean subtract and scale an image for use in a blob.""" + im = im.astype(np.float32, copy=False) + im -= pixel_means + im_shape = im.shape + im_size_min = np.min(im_shape[0:2]) + im_size_max = np.max(im_shape[0:2]) + im_scale = float(target_size) / float(im_size_min) + # Prevent the biggest axis from being more than MAX_SIZE + if np.round(im_scale * im_size_max) > max_size: + im_scale = float(max_size) / float(im_size_max) + im = cv2.resize(im, None, None, fx=im_scale, fy=im_scale, + interpolation=cv2.INTER_LINEAR) + + return im, im_scale diff --git a/lib/utils/timer.py b/lib/utils/timer.py new file mode 100644 index 0000000..dacc942 --- /dev/null +++ b/lib/utils/timer.py @@ -0,0 +1,32 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import time + +class Timer(object): + """A simple timer.""" + def __init__(self): + self.total_time = 0. + self.calls = 0 + self.start_time = 0. + self.diff = 0. + self.average_time = 0. 
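# Standalone usage sketch, not part of the original file: wrap any call in
# tic()/toc(); toc() accumulates a running average, or returns just the last
# interval with average=False. do_work() is a hypothetical placeholder.
from utils.timer import Timer
import time

def do_work():
    time.sleep(0.01)

t = Timer()
for _ in range(5):
    t.tic()
    do_work()
    t.toc()
print 'average over {:d} calls: {:.3f}s'.format(t.calls, t.average_time)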
+ + def tic(self): + # using time.time instead of time.clock because time time.clock + # does not normalize for multithreading + self.start_time = time.time() + + def toc(self, average=True): + self.diff = time.time() - self.start_time + self.total_time += self.diff + self.calls += 1 + self.average_time = self.total_time / self.calls + if average: + return self.average_time + else: + return self.diff diff --git a/models/coco/VGG16/fast_rcnn/solver.prototxt b/models/coco/VGG16/fast_rcnn/solver.prototxt new file mode 100644 index 0000000..e537e1b --- /dev/null +++ b/models/coco/VGG16/fast_rcnn/solver.prototxt @@ -0,0 +1,16 @@ +train_net: "models/coco/VGG16/fast_rcnn/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 200000 +display: 20 +average_loss: 100 +# iter_size: 1 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_fast_rcnn" +#debug_info: true diff --git a/models/coco/VGG16/fast_rcnn/test.prototxt b/models/coco/VGG16/fast_rcnn/test.prototxt new file mode 100644 index 0000000..5bc1e99 --- /dev/null +++ b/models/coco/VGG16/fast_rcnn/test.prototxt @@ -0,0 +1,499 @@ +name: "VGG_ILSVRC_16_layers" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "rois" +input_shape { + dim: 1 # to be changed on-the-fly to num ROIs + dim: 5 # [batch ind, x1, y1, x2, y2] zero-based indexing +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + decay_mult: 1 
+ } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: 
"cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/coco/VGG16/fast_rcnn/train.prototxt b/models/coco/VGG16/fast_rcnn/train.prototxt new file mode 100644 index 0000000..1bba398 --- /dev/null +++ b/models/coco/VGG16/fast_rcnn/train.prototxt @@ -0,0 +1,485 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 81" + } +} +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + } + 
param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: 
"SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/coco/VGG16/faster_rcnn_end2end/solver.prototxt b/models/coco/VGG16/faster_rcnn_end2end/solver.prototxt new file mode 100644 index 0000000..c8e57db --- /dev/null +++ b/models/coco/VGG16/faster_rcnn_end2end/solver.prototxt @@ -0,0 +1,15 @@ +train_net: "models/coco/VGG16/faster_rcnn_end2end/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 350000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_faster_rcnn" +iter_size: 2 diff --git a/models/coco/VGG16/faster_rcnn_end2end/test.prototxt b/models/coco/VGG16/faster_rcnn_end2end/test.prototxt new file mode 100644 index 0000000..a700a52 --- /dev/null +++ b/models/coco/VGG16/faster_rcnn_end2end/test.prototxt @@ -0,0 +1,590 @@ +name: "VGG_ILSVRC_16_layers" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: 
"relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 24 # 2(bg/fg) * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: 
"rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 48 # 4 * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 24 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16 \n'scales': !!python/tuple [4, 8, 16, 32]" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/coco/VGG16/faster_rcnn_end2end/train.prototxt b/models/coco/VGG16/faster_rcnn_end2end/train.prototxt new file mode 100644 index 0000000..d36c92d --- /dev/null +++ b/models/coco/VGG16/faster_rcnn_end2end/train.prototxt @@ -0,0 +1,642 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 81" + } +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + 
lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: 
"Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 24 # 2(bg/fg) * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 48 # 4 * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16 \n'scales': !!python/tuple [4, 8, 16, 32]" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 24 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16 \n'scales': !!python/tuple [4, 8, 16, 32]" + } +} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 
'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 81" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/coco/VGG_CNN_M_1024/fast_rcnn/solver.prototxt b/models/coco/VGG_CNN_M_1024/fast_rcnn/solver.prototxt new file mode 100644 index 0000000..af9aa44 --- /dev/null +++ b/models/coco/VGG_CNN_M_1024/fast_rcnn/solver.prototxt @@ -0,0 +1,15 @@ +train_net: "models/coco/VGG_CNN_M_1024/fast_rcnn/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 200000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_fast_rcnn" +#debug_info: true diff --git a/models/coco/VGG_CNN_M_1024/fast_rcnn/test.prototxt b/models/coco/VGG_CNN_M_1024/fast_rcnn/test.prototxt new file mode 100644 index 0000000..733a759 --- /dev/null +++ b/models/coco/VGG_CNN_M_1024/fast_rcnn/test.prototxt @@ -0,0 +1,299 @@ +name: "VGG_CNN_M_1024" +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} +input: "rois" +input_shape { + dim: 1 # to be changed on-the-fly to num ROIs + dim: 5 # [batch ind, x1, y1, x2, y2] zero-based indexing +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + 
bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/coco/VGG_CNN_M_1024/fast_rcnn/train.prototxt b/models/coco/VGG_CNN_M_1024/fast_rcnn/train.prototxt new file mode 100644 index 0000000..2f87439 --- /dev/null +++ b/models/coco/VGG_CNN_M_1024/fast_rcnn/train.prototxt @@ -0,0 +1,292 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 
'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 81" + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: 
"labels" + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/solver.prototxt b/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/solver.prototxt new file mode 100644 index 0000000..c398a08 --- /dev/null +++ b/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/solver.prototxt @@ -0,0 +1,14 @@ +train_net: "models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 350000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_faster_rcnn" diff --git a/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/test.prototxt b/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/test.prototxt new file mode 100644 index 0000000..ddc633e --- /dev/null +++ b/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/test.prototxt @@ -0,0 +1,432 @@ +name: "VGG_CNN_M_1024" +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN 
============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +#layer { +# name: "rpn_conv/3x3" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/3x3" +# param { lr_mult: 1.0 decay_mult: 1.0 } +# param { lr_mult: 2.0 decay_mult: 0 } +# convolution_param { +# num_output: 192 +# kernel_size: 3 pad: 1 stride: 1 +# weight_filler { type: "gaussian" std: 0.01 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn_conv/5x5" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/5x5" +# param { lr_mult: 1.0 decay_mult: 1.0 } +# param { lr_mult: 2.0 decay_mult: 0 } +# convolution_param { +# num_output: 64 +# kernel_size: 5 pad: 2 stride: 1 +# weight_filler { type: "gaussian" std: 0.0036 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn/output" +# type: "Concat" +# bottom: "rpn_conv/3x3" +# bottom: "rpn_conv/5x5" +# top: "rpn/output" +#} +#layer { +# name: "rpn_relu/output" +# type: "ReLU" +# bottom: "rpn/output" +# top: "rpn/output" +#} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 24 # 2(bg/fg) * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 48 # 4 * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 24 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16 \n'scales': !!python/tuple [4, 8, 16, 32]" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 
1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt b/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt new file mode 100644 index 0000000..a15e7b3 --- /dev/null +++ b/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt @@ -0,0 +1,453 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 81" + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: 
"Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 24 # 2(bg/fg) * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 48 # 4 * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16 \n'scales': !!python/tuple [4, 8, 16, 32]" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 24 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16 \n'scales': !!python/tuple [4, 8, 16, 32]" + } +} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 81" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + 
type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/class-aware/test.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/class-aware/test.prototxt new file mode 100644 index 0000000..076dd02 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/class-aware/test.prototxt @@ -0,0 +1,7186 @@ +name: "ResNet101" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +# ------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "conv1" + bottom: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + 
type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2a" + bottom: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2b" + bottom: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2a" + bottom: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" 
+} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2b" + bottom: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2a" + bottom: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: 
"res2c_branch2b" + bottom: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2a" + bottom: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2b" + bottom: 
"res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b1_branch2a" + name: "res3b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "bn3b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "scale3b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2a" + bottom: "res3b1_branch2a" + name: "res3b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2b" + name: "res3b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "bn3b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "scale3b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2b" + bottom: "res3b1_branch2b" + name: "res3b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2c" + name: "res3b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "bn3b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "scale3b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + 
lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b1_branch2c" + top: "res3b1" + name: "res3b1" + type: "Eltwise" +} + +layer { + bottom: "res3b1" + top: "res3b1" + name: "res3b1_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1" + top: "res3b2_branch2a" + name: "res3b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "bn3b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "scale3b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2a" + bottom: "res3b2_branch2a" + name: "res3b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2b" + name: "res3b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "bn3b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "scale3b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2b" + bottom: "res3b2_branch2b" + name: "res3b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2c" + name: "res3b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "bn3b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "scale3b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1" + bottom: "res3b2_branch2c" + top: "res3b2" + name: "res3b2" + type: "Eltwise" +} + +layer { + bottom: "res3b2" + top: "res3b2" + name: "res3b2_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2" + top: "res3b3_branch2a" + name: "res3b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "bn3b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "scale3b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2a" + bottom: "res3b3_branch2a" + name: "res3b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2b" + name: "res3b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "bn3b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "scale3b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2b" + bottom: "res3b3_branch2b" + name: "res3b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2c" + name: "res3b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "bn3b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "scale3b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2" + bottom: "res3b3_branch2c" + top: "res3b3" + name: "res3b3" + type: "Eltwise" +} + +layer { + bottom: "res3b3" + top: "res3b3" + name: "res3b3_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } 
+ param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2a" + bottom: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2b" + bottom: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b1_branch2a" + name: "res4b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "bn4b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "scale4b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2a" + bottom: "res4b1_branch2a" + name: "res4b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2b" + name: "res4b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "bn4b1_branch2b" + type: "BatchNorm" + batch_norm_param { + 
use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "scale4b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2b" + bottom: "res4b1_branch2b" + name: "res4b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2c" + name: "res4b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "bn4b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "scale4b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b1_branch2c" + top: "res4b1" + name: "res4b1" + type: "Eltwise" +} + +layer { + bottom: "res4b1" + top: "res4b1" + name: "res4b1_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1" + top: "res4b2_branch2a" + name: "res4b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "bn4b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "scale4b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2a" + bottom: "res4b2_branch2a" + name: "res4b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2b" + name: "res4b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "bn4b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "scale4b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2b" + bottom: "res4b2_branch2b" + name: "res4b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2c" + name: "res4b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 
+ } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "bn4b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "scale4b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1" + bottom: "res4b2_branch2c" + top: "res4b2" + name: "res4b2" + type: "Eltwise" +} + +layer { + bottom: "res4b2" + top: "res4b2" + name: "res4b2_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2" + top: "res4b3_branch2a" + name: "res4b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "bn4b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "scale4b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2a" + bottom: "res4b3_branch2a" + name: "res4b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2b" + name: "res4b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "bn4b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "scale4b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2b" + bottom: "res4b3_branch2b" + name: "res4b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2c" + name: "res4b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "bn4b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "scale4b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2" + bottom: "res4b3_branch2c" + top: "res4b3" + name: "res4b3" + type: "Eltwise" +} + +layer { + bottom: "res4b3" + top: "res4b3" + name: "res4b3_relu" + type: "ReLU" +} + +layer { + 
bottom: "res4b3" + top: "res4b4_branch2a" + name: "res4b4_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "bn4b4_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "scale4b4_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2a" + bottom: "res4b4_branch2a" + name: "res4b4_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2b" + name: "res4b4_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "bn4b4_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "scale4b4_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2b" + bottom: "res4b4_branch2b" + name: "res4b4_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2c" + name: "res4b4_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "bn4b4_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "scale4b4_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3" + bottom: "res4b4_branch2c" + top: "res4b4" + name: "res4b4" + type: "Eltwise" +} + +layer { + bottom: "res4b4" + top: "res4b4" + name: "res4b4_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4" + top: "res4b5_branch2a" + name: "res4b5_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "bn4b5_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "scale4b5_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } 
+} + +layer { + top: "res4b5_branch2a" + bottom: "res4b5_branch2a" + name: "res4b5_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2b" + name: "res4b5_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "bn4b5_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "scale4b5_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2b" + bottom: "res4b5_branch2b" + name: "res4b5_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2c" + name: "res4b5_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "bn4b5_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "scale4b5_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4" + bottom: "res4b5_branch2c" + top: "res4b5" + name: "res4b5" + type: "Eltwise" +} + +layer { + bottom: "res4b5" + top: "res4b5" + name: "res4b5_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5" + top: "res4b6_branch2a" + name: "res4b6_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "bn4b6_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "scale4b6_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2a" + bottom: "res4b6_branch2a" + name: "res4b6_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2b" + name: "res4b6_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "bn4b6_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "scale4b6_branch2b" + type: 
"Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2b" + bottom: "res4b6_branch2b" + name: "res4b6_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2c" + name: "res4b6_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "bn4b6_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "scale4b6_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5" + bottom: "res4b6_branch2c" + top: "res4b6" + name: "res4b6" + type: "Eltwise" +} + +layer { + bottom: "res4b6" + top: "res4b6" + name: "res4b6_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6" + top: "res4b7_branch2a" + name: "res4b7_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "bn4b7_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "scale4b7_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2a" + bottom: "res4b7_branch2a" + name: "res4b7_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2b" + name: "res4b7_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "bn4b7_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "scale4b7_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2b" + bottom: "res4b7_branch2b" + name: "res4b7_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2c" + name: "res4b7_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "bn4b7_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { 
+ lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "scale4b7_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6" + bottom: "res4b7_branch2c" + top: "res4b7" + name: "res4b7" + type: "Eltwise" +} + +layer { + bottom: "res4b7" + top: "res4b7" + name: "res4b7_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7" + top: "res4b8_branch2a" + name: "res4b8_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "bn4b8_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "scale4b8_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2a" + bottom: "res4b8_branch2a" + name: "res4b8_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2b" + name: "res4b8_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "bn4b8_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "scale4b8_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2b" + bottom: "res4b8_branch2b" + name: "res4b8_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2c" + name: "res4b8_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "bn4b8_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "scale4b8_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7" + bottom: "res4b8_branch2c" + top: "res4b8" + name: "res4b8" + type: "Eltwise" +} + +layer { + bottom: "res4b8" + top: "res4b8" + name: "res4b8_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8" + top: "res4b9_branch2a" + name: "res4b9_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: 
"res4b9_branch2a" + name: "bn4b9_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "scale4b9_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2a" + bottom: "res4b9_branch2a" + name: "res4b9_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2b" + name: "res4b9_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "bn4b9_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "scale4b9_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2b" + bottom: "res4b9_branch2b" + name: "res4b9_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2c" + name: "res4b9_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "bn4b9_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "scale4b9_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8" + bottom: "res4b9_branch2c" + top: "res4b9" + name: "res4b9" + type: "Eltwise" +} + +layer { + bottom: "res4b9" + top: "res4b9" + name: "res4b9_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9" + top: "res4b10_branch2a" + name: "res4b10_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "bn4b10_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "scale4b10_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2a" + bottom: "res4b10_branch2a" + name: "res4b10_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2b" + name: "res4b10_branch2b" + type: "Convolution" + convolution_param { + 
num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "bn4b10_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "scale4b10_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2b" + bottom: "res4b10_branch2b" + name: "res4b10_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2c" + name: "res4b10_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "bn4b10_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "scale4b10_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9" + bottom: "res4b10_branch2c" + top: "res4b10" + name: "res4b10" + type: "Eltwise" +} + +layer { + bottom: "res4b10" + top: "res4b10" + name: "res4b10_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10" + top: "res4b11_branch2a" + name: "res4b11_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "bn4b11_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "scale4b11_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2a" + bottom: "res4b11_branch2a" + name: "res4b11_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2b" + name: "res4b11_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "bn4b11_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "scale4b11_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2b" + bottom: "res4b11_branch2b" 
+ name: "res4b11_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2c" + name: "res4b11_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "bn4b11_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "scale4b11_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10" + bottom: "res4b11_branch2c" + top: "res4b11" + name: "res4b11" + type: "Eltwise" +} + +layer { + bottom: "res4b11" + top: "res4b11" + name: "res4b11_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11" + top: "res4b12_branch2a" + name: "res4b12_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "bn4b12_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "scale4b12_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2a" + bottom: "res4b12_branch2a" + name: "res4b12_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2b" + name: "res4b12_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "bn4b12_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "scale4b12_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2b" + bottom: "res4b12_branch2b" + name: "res4b12_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2c" + name: "res4b12_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "bn4b12_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "scale4b12_branch2c" + type: "Scale" + 
scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11" + bottom: "res4b12_branch2c" + top: "res4b12" + name: "res4b12" + type: "Eltwise" +} + +layer { + bottom: "res4b12" + top: "res4b12" + name: "res4b12_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12" + top: "res4b13_branch2a" + name: "res4b13_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "bn4b13_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "scale4b13_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2a" + bottom: "res4b13_branch2a" + name: "res4b13_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2b" + name: "res4b13_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "bn4b13_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "scale4b13_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2b" + bottom: "res4b13_branch2b" + name: "res4b13_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2c" + name: "res4b13_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "bn4b13_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "scale4b13_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12" + bottom: "res4b13_branch2c" + top: "res4b13" + name: "res4b13" + type: "Eltwise" +} + +layer { + bottom: "res4b13" + top: "res4b13" + name: "res4b13_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13" + top: "res4b14_branch2a" + name: "res4b14_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "bn4b14_branch2a" + type: "BatchNorm" + batch_norm_param { + 
use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "scale4b14_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2a" + bottom: "res4b14_branch2a" + name: "res4b14_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2b" + name: "res4b14_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "bn4b14_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "scale4b14_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2b" + bottom: "res4b14_branch2b" + name: "res4b14_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2c" + name: "res4b14_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "bn4b14_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "scale4b14_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13" + bottom: "res4b14_branch2c" + top: "res4b14" + name: "res4b14" + type: "Eltwise" +} + +layer { + bottom: "res4b14" + top: "res4b14" + name: "res4b14_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14" + top: "res4b15_branch2a" + name: "res4b15_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "bn4b15_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "scale4b15_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2a" + bottom: "res4b15_branch2a" + name: "res4b15_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2b" + name: "res4b15_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 
+ bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "bn4b15_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "scale4b15_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2b" + bottom: "res4b15_branch2b" + name: "res4b15_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2c" + name: "res4b15_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "bn4b15_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "scale4b15_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14" + bottom: "res4b15_branch2c" + top: "res4b15" + name: "res4b15" + type: "Eltwise" +} + +layer { + bottom: "res4b15" + top: "res4b15" + name: "res4b15_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15" + top: "res4b16_branch2a" + name: "res4b16_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "bn4b16_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "scale4b16_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2a" + bottom: "res4b16_branch2a" + name: "res4b16_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2b" + name: "res4b16_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "bn4b16_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "scale4b16_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2b" + bottom: "res4b16_branch2b" + name: "res4b16_branch2b_relu" + type: "ReLU" +} + 
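+# ---------------------------------------------------------------------------
+# Every res4b* identity unit in this file repeats the same bottleneck:
+#   1x1 conv (num_output: 256) -> 3x3 conv, pad 1 (256) -> 1x1 conv (1024),
+# each followed by BatchNorm (use_global_stats: true) and Scale (bias_term:
+# true) whose params are frozen (lr_mult: 0.0, decay_mult: 0.0), and the unit
+# closes with an Eltwise sum against the previous block output plus a ReLU.
+# Convolution weights train with lr_mult: 1.0 from res3a onward, while the
+# res2* convolutions are kept frozen with lr_mult: 0.0.
+# ---------------------------------------------------------------------------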
+layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2c" + name: "res4b16_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "bn4b16_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "scale4b16_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15" + bottom: "res4b16_branch2c" + top: "res4b16" + name: "res4b16" + type: "Eltwise" +} + +layer { + bottom: "res4b16" + top: "res4b16" + name: "res4b16_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16" + top: "res4b17_branch2a" + name: "res4b17_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "bn4b17_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "scale4b17_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2a" + bottom: "res4b17_branch2a" + name: "res4b17_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2b" + name: "res4b17_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "bn4b17_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "scale4b17_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2b" + bottom: "res4b17_branch2b" + name: "res4b17_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2c" + name: "res4b17_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "bn4b17_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "scale4b17_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 
0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16" + bottom: "res4b17_branch2c" + top: "res4b17" + name: "res4b17" + type: "Eltwise" +} + +layer { + bottom: "res4b17" + top: "res4b17" + name: "res4b17_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17" + top: "res4b18_branch2a" + name: "res4b18_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "bn4b18_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "scale4b18_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2a" + bottom: "res4b18_branch2a" + name: "res4b18_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2b" + name: "res4b18_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "bn4b18_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "scale4b18_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2b" + bottom: "res4b18_branch2b" + name: "res4b18_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2c" + name: "res4b18_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "bn4b18_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "scale4b18_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17" + bottom: "res4b18_branch2c" + top: "res4b18" + name: "res4b18" + type: "Eltwise" +} + +layer { + bottom: "res4b18" + top: "res4b18" + name: "res4b18_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18" + top: "res4b19_branch2a" + name: "res4b19_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "bn4b19_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "scale4b19_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2a" + bottom: "res4b19_branch2a" + name: "res4b19_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2b" + name: "res4b19_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "bn4b19_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "scale4b19_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2b" + bottom: "res4b19_branch2b" + name: "res4b19_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2c" + name: "res4b19_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "bn4b19_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "scale4b19_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18" + bottom: "res4b19_branch2c" + top: "res4b19" + name: "res4b19" + type: "Eltwise" +} + +layer { + bottom: "res4b19" + top: "res4b19" + name: "res4b19_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19" + top: "res4b20_branch2a" + name: "res4b20_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "bn4b20_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "scale4b20_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2a" + bottom: "res4b20_branch2a" + name: "res4b20_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2b" + name: "res4b20_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + 
bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "bn4b20_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "scale4b20_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2b" + bottom: "res4b20_branch2b" + name: "res4b20_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2c" + name: "res4b20_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "bn4b20_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "scale4b20_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19" + bottom: "res4b20_branch2c" + top: "res4b20" + name: "res4b20" + type: "Eltwise" +} + +layer { + bottom: "res4b20" + top: "res4b20" + name: "res4b20_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20" + top: "res4b21_branch2a" + name: "res4b21_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "bn4b21_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "scale4b21_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2a" + bottom: "res4b21_branch2a" + name: "res4b21_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2b" + name: "res4b21_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "bn4b21_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "scale4b21_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2b" + bottom: "res4b21_branch2b" + name: "res4b21_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2c" + 
name: "res4b21_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "bn4b21_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "scale4b21_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20" + bottom: "res4b21_branch2c" + top: "res4b21" + name: "res4b21" + type: "Eltwise" +} + +layer { + bottom: "res4b21" + top: "res4b21" + name: "res4b21_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21" + top: "res4b22_branch2a" + name: "res4b22_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "bn4b22_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "scale4b22_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2a" + bottom: "res4b22_branch2a" + name: "res4b22_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2b" + name: "res4b22_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "bn4b22_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "scale4b22_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2b" + bottom: "res4b22_branch2b" + name: "res4b22_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2c" + name: "res4b22_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "bn4b22_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "scale4b22_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } +} + +layer { + bottom: "res4b21" + bottom: "res4b22_branch2c" + top: "res4b22" + name: "res4b22" + type: "Eltwise" +} + +layer { + bottom: "res4b22" + top: "res4b22" + name: "res4b22_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2a" + bottom: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2b" + bottom: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} 
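+# Note: the res5 3x3 convolutions (res5a/b/c_branch2b) use dilation: 2 with stride: 1,
+# so conv5 does not downsample further and the feature stride at "res5c" stays at 16.
+# This matches 'feat_stride': 16 in the proposal layer and spatial_scale: 0.0625 (= 1/16)
+# in the PSROIPooling layers defined later in this file.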
+ +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2a" + bottom: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2b" + bottom: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: 
"res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2a" + bottom: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2b" + bottom: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 
dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 4116 #4*21*(7^2) 4*cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 84 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred_pre" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#-----------------------output------------------------ +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob_pre" +} + +layer { + name: "cls_prob_reshape" + type: "Reshape" + bottom: "cls_prob_pre" + top: "cls_prob" + reshape_param { + shape { + dim: -1 + dim: 21 + } + } +} + +layer { + name: "bbox_pred_reshape" + type: "Reshape" + bottom: "bbox_pred_pre" + top: "bbox_pred" + reshape_param { + shape { + dim: -1 + dim: 84 + } + } +} + + diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/class-aware/train_ohem.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/class-aware/train_ohem.prototxt new file mode 100644 index 0000000..ae3ce44 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/class-aware/train_ohem.prototxt @@ -0,0 +1,7343 @@ +name: "ResNet-101" +layer { + name: 'input-data' + type: 'Python' + top:
'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +# ------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "conv1" + bottom: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2a" + bottom: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 
0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2b" + bottom: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2a" + bottom: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2b" + bottom: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param 
{ + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2a" + bottom: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2b" + bottom: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + 
lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2a" + bottom: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2b" + bottom: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b1_branch2a" + name: "res3b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "bn3b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "scale3b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2a" + bottom: "res3b1_branch2a" + name: "res3b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2b" + name: "res3b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "bn3b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "scale3b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2b" + bottom: "res3b1_branch2b" + name: "res3b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2c" + name: "res3b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "bn3b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "scale3b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b1_branch2c" + top: "res3b1" + name: "res3b1" + type: "Eltwise" +} + +layer { + bottom: "res3b1" + top: "res3b1" + name: "res3b1_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1" + top: "res3b2_branch2a" + name: "res3b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "bn3b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "scale3b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2a" + bottom: "res3b2_branch2a" + name: "res3b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2b" + name: "res3b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "bn3b2_branch2b" + type: "BatchNorm" + batch_norm_param { + 
use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "scale3b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2b" + bottom: "res3b2_branch2b" + name: "res3b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2c" + name: "res3b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "bn3b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "scale3b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1" + bottom: "res3b2_branch2c" + top: "res3b2" + name: "res3b2" + type: "Eltwise" +} + +layer { + bottom: "res3b2" + top: "res3b2" + name: "res3b2_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2" + top: "res3b3_branch2a" + name: "res3b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "bn3b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "scale3b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2a" + bottom: "res3b3_branch2a" + name: "res3b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2b" + name: "res3b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "bn3b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "scale3b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2b" + bottom: "res3b3_branch2b" + name: "res3b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2c" + name: "res3b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + 
} +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "bn3b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "scale3b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2" + bottom: "res3b3_branch2c" + top: "res3b3" + name: "res3b3" + type: "Eltwise" +} + +layer { + bottom: "res3b3" + top: "res3b3" + name: "res3b3_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2a" + bottom: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2b" + bottom: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + 
bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b1_branch2a" + name: "res4b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "bn4b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "scale4b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2a" + bottom: "res4b1_branch2a" + name: "res4b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2b" + name: "res4b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "bn4b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "scale4b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2b" + bottom: "res4b1_branch2b" + name: "res4b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2c" + name: "res4b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "bn4b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "scale4b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b1_branch2c" + top: "res4b1" + name: "res4b1" + type: "Eltwise" +} + +layer { + bottom: "res4b1" + top: "res4b1" + name: "res4b1_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1" + top: 
"res4b2_branch2a" + name: "res4b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "bn4b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "scale4b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2a" + bottom: "res4b2_branch2a" + name: "res4b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2b" + name: "res4b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "bn4b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "scale4b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2b" + bottom: "res4b2_branch2b" + name: "res4b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2c" + name: "res4b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "bn4b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "scale4b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1" + bottom: "res4b2_branch2c" + top: "res4b2" + name: "res4b2" + type: "Eltwise" +} + +layer { + bottom: "res4b2" + top: "res4b2" + name: "res4b2_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2" + top: "res4b3_branch2a" + name: "res4b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "bn4b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "scale4b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: 
"res4b3_branch2a" + bottom: "res4b3_branch2a" + name: "res4b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2b" + name: "res4b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "bn4b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "scale4b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2b" + bottom: "res4b3_branch2b" + name: "res4b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2c" + name: "res4b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "bn4b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "scale4b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2" + bottom: "res4b3_branch2c" + top: "res4b3" + name: "res4b3" + type: "Eltwise" +} + +layer { + bottom: "res4b3" + top: "res4b3" + name: "res4b3_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3" + top: "res4b4_branch2a" + name: "res4b4_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "bn4b4_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "scale4b4_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2a" + bottom: "res4b4_branch2a" + name: "res4b4_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2b" + name: "res4b4_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "bn4b4_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "scale4b4_branch2b" + type: "Scale" + scale_param 
{ + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2b" + bottom: "res4b4_branch2b" + name: "res4b4_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2c" + name: "res4b4_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "bn4b4_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "scale4b4_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3" + bottom: "res4b4_branch2c" + top: "res4b4" + name: "res4b4" + type: "Eltwise" +} + +layer { + bottom: "res4b4" + top: "res4b4" + name: "res4b4_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4" + top: "res4b5_branch2a" + name: "res4b5_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "bn4b5_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "scale4b5_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2a" + bottom: "res4b5_branch2a" + name: "res4b5_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2b" + name: "res4b5_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "bn4b5_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "scale4b5_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2b" + bottom: "res4b5_branch2b" + name: "res4b5_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2c" + name: "res4b5_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "bn4b5_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "scale4b5_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4" + bottom: "res4b5_branch2c" + top: "res4b5" + name: "res4b5" + type: "Eltwise" +} + +layer { + bottom: "res4b5" + top: "res4b5" + name: "res4b5_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5" + top: "res4b6_branch2a" + name: "res4b6_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "bn4b6_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "scale4b6_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2a" + bottom: "res4b6_branch2a" + name: "res4b6_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2b" + name: "res4b6_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "bn4b6_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "scale4b6_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2b" + bottom: "res4b6_branch2b" + name: "res4b6_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2c" + name: "res4b6_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "bn4b6_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "scale4b6_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5" + bottom: "res4b6_branch2c" + top: "res4b6" + name: "res4b6" + type: "Eltwise" +} + +layer { + bottom: "res4b6" + top: "res4b6" + name: "res4b6_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6" + top: "res4b7_branch2a" + name: "res4b7_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + 
name: "bn4b7_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "scale4b7_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2a" + bottom: "res4b7_branch2a" + name: "res4b7_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2b" + name: "res4b7_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "bn4b7_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "scale4b7_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2b" + bottom: "res4b7_branch2b" + name: "res4b7_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2c" + name: "res4b7_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "bn4b7_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "scale4b7_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6" + bottom: "res4b7_branch2c" + top: "res4b7" + name: "res4b7" + type: "Eltwise" +} + +layer { + bottom: "res4b7" + top: "res4b7" + name: "res4b7_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7" + top: "res4b8_branch2a" + name: "res4b8_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "bn4b8_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "scale4b8_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2a" + bottom: "res4b8_branch2a" + name: "res4b8_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2b" + name: "res4b8_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + 
pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "bn4b8_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "scale4b8_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2b" + bottom: "res4b8_branch2b" + name: "res4b8_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2c" + name: "res4b8_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "bn4b8_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "scale4b8_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7" + bottom: "res4b8_branch2c" + top: "res4b8" + name: "res4b8" + type: "Eltwise" +} + +layer { + bottom: "res4b8" + top: "res4b8" + name: "res4b8_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8" + top: "res4b9_branch2a" + name: "res4b9_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "bn4b9_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "scale4b9_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2a" + bottom: "res4b9_branch2a" + name: "res4b9_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2b" + name: "res4b9_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "bn4b9_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "scale4b9_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2b" + bottom: "res4b9_branch2b" + name: "res4b9_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: 
"res4b9_branch2b" + top: "res4b9_branch2c" + name: "res4b9_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "bn4b9_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "scale4b9_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8" + bottom: "res4b9_branch2c" + top: "res4b9" + name: "res4b9" + type: "Eltwise" +} + +layer { + bottom: "res4b9" + top: "res4b9" + name: "res4b9_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9" + top: "res4b10_branch2a" + name: "res4b10_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "bn4b10_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "scale4b10_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2a" + bottom: "res4b10_branch2a" + name: "res4b10_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2b" + name: "res4b10_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "bn4b10_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "scale4b10_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2b" + bottom: "res4b10_branch2b" + name: "res4b10_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2c" + name: "res4b10_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "bn4b10_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "scale4b10_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + 
lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9" + bottom: "res4b10_branch2c" + top: "res4b10" + name: "res4b10" + type: "Eltwise" +} + +layer { + bottom: "res4b10" + top: "res4b10" + name: "res4b10_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10" + top: "res4b11_branch2a" + name: "res4b11_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "bn4b11_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "scale4b11_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2a" + bottom: "res4b11_branch2a" + name: "res4b11_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2b" + name: "res4b11_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "bn4b11_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "scale4b11_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2b" + bottom: "res4b11_branch2b" + name: "res4b11_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2c" + name: "res4b11_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "bn4b11_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "scale4b11_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10" + bottom: "res4b11_branch2c" + top: "res4b11" + name: "res4b11" + type: "Eltwise" +} + +layer { + bottom: "res4b11" + top: "res4b11" + name: "res4b11_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11" + top: "res4b12_branch2a" + name: "res4b12_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "bn4b12_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "scale4b12_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2a" + bottom: "res4b12_branch2a" + name: "res4b12_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2b" + name: "res4b12_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "bn4b12_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "scale4b12_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2b" + bottom: "res4b12_branch2b" + name: "res4b12_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2c" + name: "res4b12_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "bn4b12_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "scale4b12_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11" + bottom: "res4b12_branch2c" + top: "res4b12" + name: "res4b12" + type: "Eltwise" +} + +layer { + bottom: "res4b12" + top: "res4b12" + name: "res4b12_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12" + top: "res4b13_branch2a" + name: "res4b13_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "bn4b13_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "scale4b13_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2a" + bottom: "res4b13_branch2a" + name: "res4b13_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2b" + name: "res4b13_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2b" + 
top: "res4b13_branch2b" + name: "bn4b13_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "scale4b13_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2b" + bottom: "res4b13_branch2b" + name: "res4b13_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2c" + name: "res4b13_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "bn4b13_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "scale4b13_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12" + bottom: "res4b13_branch2c" + top: "res4b13" + name: "res4b13" + type: "Eltwise" +} + +layer { + bottom: "res4b13" + top: "res4b13" + name: "res4b13_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13" + top: "res4b14_branch2a" + name: "res4b14_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "bn4b14_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "scale4b14_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2a" + bottom: "res4b14_branch2a" + name: "res4b14_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2b" + name: "res4b14_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "bn4b14_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "scale4b14_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2b" + bottom: "res4b14_branch2b" + name: "res4b14_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2c" + name: "res4b14_branch2c" + 
type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "bn4b14_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "scale4b14_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13" + bottom: "res4b14_branch2c" + top: "res4b14" + name: "res4b14" + type: "Eltwise" +} + +layer { + bottom: "res4b14" + top: "res4b14" + name: "res4b14_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14" + top: "res4b15_branch2a" + name: "res4b15_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "bn4b15_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "scale4b15_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2a" + bottom: "res4b15_branch2a" + name: "res4b15_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2b" + name: "res4b15_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "bn4b15_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "scale4b15_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2b" + bottom: "res4b15_branch2b" + name: "res4b15_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2c" + name: "res4b15_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "bn4b15_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "scale4b15_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + 
bottom: "res4b14" + bottom: "res4b15_branch2c" + top: "res4b15" + name: "res4b15" + type: "Eltwise" +} + +layer { + bottom: "res4b15" + top: "res4b15" + name: "res4b15_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15" + top: "res4b16_branch2a" + name: "res4b16_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "bn4b16_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "scale4b16_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2a" + bottom: "res4b16_branch2a" + name: "res4b16_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2b" + name: "res4b16_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "bn4b16_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "scale4b16_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2b" + bottom: "res4b16_branch2b" + name: "res4b16_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2c" + name: "res4b16_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "bn4b16_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "scale4b16_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15" + bottom: "res4b16_branch2c" + top: "res4b16" + name: "res4b16" + type: "Eltwise" +} + +layer { + bottom: "res4b16" + top: "res4b16" + name: "res4b16_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16" + top: "res4b17_branch2a" + name: "res4b17_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "bn4b17_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 
0.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "scale4b17_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2a" + bottom: "res4b17_branch2a" + name: "res4b17_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2b" + name: "res4b17_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "bn4b17_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "scale4b17_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2b" + bottom: "res4b17_branch2b" + name: "res4b17_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2c" + name: "res4b17_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "bn4b17_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "scale4b17_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16" + bottom: "res4b17_branch2c" + top: "res4b17" + name: "res4b17" + type: "Eltwise" +} + +layer { + bottom: "res4b17" + top: "res4b17" + name: "res4b17_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17" + top: "res4b18_branch2a" + name: "res4b18_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "bn4b18_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "scale4b18_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2a" + bottom: "res4b18_branch2a" + name: "res4b18_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2b" + name: "res4b18_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "bn4b18_branch2b" + type: 
"BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "scale4b18_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2b" + bottom: "res4b18_branch2b" + name: "res4b18_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2c" + name: "res4b18_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "bn4b18_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "scale4b18_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17" + bottom: "res4b18_branch2c" + top: "res4b18" + name: "res4b18" + type: "Eltwise" +} + +layer { + bottom: "res4b18" + top: "res4b18" + name: "res4b18_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18" + top: "res4b19_branch2a" + name: "res4b19_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "bn4b19_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "scale4b19_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2a" + bottom: "res4b19_branch2a" + name: "res4b19_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2b" + name: "res4b19_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "bn4b19_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "scale4b19_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2b" + bottom: "res4b19_branch2b" + name: "res4b19_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2c" + name: "res4b19_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + 
kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "bn4b19_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "scale4b19_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18" + bottom: "res4b19_branch2c" + top: "res4b19" + name: "res4b19" + type: "Eltwise" +} + +layer { + bottom: "res4b19" + top: "res4b19" + name: "res4b19_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19" + top: "res4b20_branch2a" + name: "res4b20_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "bn4b20_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "scale4b20_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2a" + bottom: "res4b20_branch2a" + name: "res4b20_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2b" + name: "res4b20_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "bn4b20_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "scale4b20_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2b" + bottom: "res4b20_branch2b" + name: "res4b20_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2c" + name: "res4b20_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "bn4b20_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "scale4b20_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19" + bottom: "res4b20_branch2c" + top: "res4b20" + 
name: "res4b20" + type: "Eltwise" +} + +layer { + bottom: "res4b20" + top: "res4b20" + name: "res4b20_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20" + top: "res4b21_branch2a" + name: "res4b21_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "bn4b21_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "scale4b21_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2a" + bottom: "res4b21_branch2a" + name: "res4b21_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2b" + name: "res4b21_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "bn4b21_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "scale4b21_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2b" + bottom: "res4b21_branch2b" + name: "res4b21_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2c" + name: "res4b21_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "bn4b21_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "scale4b21_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20" + bottom: "res4b21_branch2c" + top: "res4b21" + name: "res4b21" + type: "Eltwise" +} + +layer { + bottom: "res4b21" + top: "res4b21" + name: "res4b21_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21" + top: "res4b22_branch2a" + name: "res4b22_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "bn4b22_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: 
"res4b22_branch2a" + name: "scale4b22_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2a" + bottom: "res4b22_branch2a" + name: "res4b22_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2b" + name: "res4b22_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "bn4b22_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "scale4b22_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2b" + bottom: "res4b22_branch2b" + name: "res4b22_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2c" + name: "res4b22_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "bn4b22_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "scale4b22_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21" + bottom: "res4b22_branch2c" + top: "res4b22" + name: "res4b22" + type: "Eltwise" +} + +layer { + bottom: "res4b22" + top: "res4b22" + name: "res4b22_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + 
bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2a" + bottom: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2b" + bottom: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2a" + bottom: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2b" + bottom: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2a" + bottom: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2b" + bottom: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: 
"bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 
'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 21" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 4116 #8*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 84 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#--------------online hard example mining-------------- +layer { + name: "per_roi_loss_cls" + type: "SoftmaxWithLossOHEM" + bottom: "cls_score" + bottom: "labels" + top: "temp_loss_cls" + top: "temp_prob_cls" + top: "per_roi_loss_cls" + loss_weight: 0 + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + top: "temp_loss_bbox" + top: "per_roi_loss_bbox" + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss" + type: "Eltwise" + bottom: "per_roi_loss_cls" + bottom: "per_roi_loss_bbox" + top: "per_roi_loss" + propagate_down: false + propagate_down: false +} + +layer { + bottom: "rois" + bottom: "per_roi_loss" + bottom: "labels" + bottom: "bbox_inside_weights" + top: "labels_ohem" + top: "bbox_loss_weights_ohem" + name: "annotator_detector" + type: 
"BoxAnnotatorOHEM" + box_annotator_ohem_param { + roi_per_img: 128 + ignore_label: -1 + } + propagate_down: false + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "silence" + type: "Silence" + bottom: "bbox_outside_weights" + bottom: "temp_loss_cls" + bottom: "temp_prob_cls" + bottom: "temp_loss_bbox" +} + +#-----------------------output------------------------ +layer { + name: "loss" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels_ohem" + top: "loss_cls" + loss_weight: 1 + loss_param { + ignore_label: -1 + } + propagate_down: true + propagate_down: false +} + +layer { + name: "accuarcy" + type: "Accuracy" + bottom: "cls_score" + bottom: "labels_ohem" + top: "accuarcy" + #include: { phase: TEST } + accuracy_param { + ignore_label: -1 + } + propagate_down: false + propagate_down: false +} + +layer { + name: "loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_loss_weights_ohem" + top: "loss_bbox" + loss_weight: 1 + loss_param { + normalization: PRE_FIXED + pre_fixed_normalizer: 128 + } + propagate_down: true + propagate_down: false + propagate_down: false +} + diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/solver.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/solver.prototxt new file mode 100644 index 0000000..40e0173 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/solver.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 80000 +display: 20 + +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet101_rfcn" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/solver_ohem.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_ohem.prototxt new file mode 100644 index 0000000..5247534 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_ohem.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 80000 +display: 20 + +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet101_rfcn_ohem" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/solver_ohem_continue.prototxt~ b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_ohem_continue.prototxt~ new file mode 100644 index 0000000..e4b9335 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_ohem_continue.prototxt~ @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt" +base_lr: 0.001 +lr_policy: "multistep" +gamma: 0.1 +stepvalue: 10000 +display: 20 + +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet101_rfcn_ohem" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/solver_warmup.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_warmup.prototxt new file mode 100644 index 0000000..39e5912 --- /dev/null +++ 
b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_warmup.prototxt @@ -0,0 +1,14 @@ +train_net: "models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt" +lr_policy: "fixed" +base_lr: 0.0001 + +display: 20 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet101_rfcn_ohem" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/solver_warmup_continue.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_warmup_continue.prototxt new file mode 100644 index 0000000..06d470c --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_warmup_continue.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 70000 + +display: 20 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet101_rfcn_ohem" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/test_agonistic.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/test_agonistic.prototxt new file mode 100644 index 0000000..a2b0965 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/test_agonistic.prototxt @@ -0,0 +1,7186 @@ +name: "ResNet-101" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +# ------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "conv1" + bottom: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param 
{ + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2a" + bottom: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2b" + bottom: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2a" + bottom: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" 
+ top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2b" + bottom: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2a" + bottom: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2b" + bottom: "res2c_branch2b" + 
name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2a" + bottom: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2b" + bottom: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: 
"ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b1_branch2a" + name: "res3b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "bn3b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "scale3b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2a" + bottom: "res3b1_branch2a" + name: "res3b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2b" + name: "res3b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "bn3b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "scale3b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2b" + bottom: "res3b1_branch2b" + name: "res3b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2c" + name: "res3b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "bn3b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "scale3b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 
+ decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b1_branch2c" + top: "res3b1" + name: "res3b1" + type: "Eltwise" +} + +layer { + bottom: "res3b1" + top: "res3b1" + name: "res3b1_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1" + top: "res3b2_branch2a" + name: "res3b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "bn3b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "scale3b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2a" + bottom: "res3b2_branch2a" + name: "res3b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2b" + name: "res3b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "bn3b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "scale3b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2b" + bottom: "res3b2_branch2b" + name: "res3b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2c" + name: "res3b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "bn3b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "scale3b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1" + bottom: "res3b2_branch2c" + top: "res3b2" + name: "res3b2" + type: "Eltwise" +} + +layer { + bottom: "res3b2" + top: "res3b2" + name: "res3b2_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2" + top: "res3b3_branch2a" + name: "res3b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "bn3b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + 
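# NOTE: every res* unit in this prototxt repeats the same bottleneck template:
#   1x1 Convolution (reduce) -> BatchNorm (use_global_stats: true) -> Scale -> ReLU
#   3x3 Convolution          -> BatchNorm -> Scale -> ReLU
#   1x1 Convolution (expand) -> BatchNorm -> Scale
#   Eltwise sum with the shortcut, followed by ReLU.
# All BatchNorm and Scale parameters carry lr_mult: 0.0 and decay_mult: 0.0, so the
# pretrained ResNet-101 statistics and affine terms stay frozen.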
+layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "scale3b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2a" + bottom: "res3b3_branch2a" + name: "res3b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2b" + name: "res3b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "bn3b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "scale3b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2b" + bottom: "res3b3_branch2b" + name: "res3b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2c" + name: "res3b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "bn3b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "scale3b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2" + bottom: "res3b3_branch2c" + top: "res3b3" + name: "res3b3" + type: "Eltwise" +} + +layer { + bottom: "res3b3" + top: "res3b3" + name: "res3b3_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + 
+layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2a" + bottom: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2b" + bottom: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b1_branch2a" + name: "res4b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "bn4b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "scale4b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2a" + bottom: "res4b1_branch2a" + name: "res4b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2b" + name: "res4b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "bn4b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "scale4b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2b" + bottom: "res4b1_branch2b" + name: "res4b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2c" + name: "res4b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "bn4b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "scale4b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b1_branch2c" + top: "res4b1" + name: "res4b1" + type: "Eltwise" +} + +layer { + bottom: "res4b1" + top: "res4b1" + name: "res4b1_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1" + top: "res4b2_branch2a" + name: "res4b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "bn4b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "scale4b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2a" + bottom: "res4b2_branch2a" + name: "res4b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2b" + name: "res4b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "bn4b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "scale4b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2b" + bottom: "res4b2_branch2b" + name: "res4b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2c" + name: "res4b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: 
"res4b2_branch2c" + name: "bn4b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "scale4b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1" + bottom: "res4b2_branch2c" + top: "res4b2" + name: "res4b2" + type: "Eltwise" +} + +layer { + bottom: "res4b2" + top: "res4b2" + name: "res4b2_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2" + top: "res4b3_branch2a" + name: "res4b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "bn4b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "scale4b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2a" + bottom: "res4b3_branch2a" + name: "res4b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2b" + name: "res4b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "bn4b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "scale4b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2b" + bottom: "res4b3_branch2b" + name: "res4b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2c" + name: "res4b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "bn4b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "scale4b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2" + bottom: "res4b3_branch2c" + top: "res4b3" + name: "res4b3" + type: "Eltwise" +} + +layer { + bottom: "res4b3" + top: "res4b3" + name: "res4b3_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3" + top: "res4b4_branch2a" + name: 
"res4b4_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "bn4b4_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "scale4b4_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2a" + bottom: "res4b4_branch2a" + name: "res4b4_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2b" + name: "res4b4_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "bn4b4_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "scale4b4_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2b" + bottom: "res4b4_branch2b" + name: "res4b4_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2c" + name: "res4b4_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "bn4b4_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "scale4b4_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3" + bottom: "res4b4_branch2c" + top: "res4b4" + name: "res4b4" + type: "Eltwise" +} + +layer { + bottom: "res4b4" + top: "res4b4" + name: "res4b4_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4" + top: "res4b5_branch2a" + name: "res4b5_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "bn4b5_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "scale4b5_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2a" + bottom: 
"res4b5_branch2a" + name: "res4b5_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2b" + name: "res4b5_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "bn4b5_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "scale4b5_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2b" + bottom: "res4b5_branch2b" + name: "res4b5_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2c" + name: "res4b5_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "bn4b5_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "scale4b5_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4" + bottom: "res4b5_branch2c" + top: "res4b5" + name: "res4b5" + type: "Eltwise" +} + +layer { + bottom: "res4b5" + top: "res4b5" + name: "res4b5_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5" + top: "res4b6_branch2a" + name: "res4b6_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "bn4b6_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "scale4b6_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2a" + bottom: "res4b6_branch2a" + name: "res4b6_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2b" + name: "res4b6_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "bn4b6_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "scale4b6_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2b" + bottom: "res4b6_branch2b" + name: "res4b6_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2c" + name: "res4b6_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "bn4b6_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "scale4b6_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5" + bottom: "res4b6_branch2c" + top: "res4b6" + name: "res4b6" + type: "Eltwise" +} + +layer { + bottom: "res4b6" + top: "res4b6" + name: "res4b6_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6" + top: "res4b7_branch2a" + name: "res4b7_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "bn4b7_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "scale4b7_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2a" + bottom: "res4b7_branch2a" + name: "res4b7_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2b" + name: "res4b7_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "bn4b7_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "scale4b7_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2b" + bottom: "res4b7_branch2b" + name: "res4b7_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2c" + name: "res4b7_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "bn4b7_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer 
{ + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "scale4b7_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6" + bottom: "res4b7_branch2c" + top: "res4b7" + name: "res4b7" + type: "Eltwise" +} + +layer { + bottom: "res4b7" + top: "res4b7" + name: "res4b7_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7" + top: "res4b8_branch2a" + name: "res4b8_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "bn4b8_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "scale4b8_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2a" + bottom: "res4b8_branch2a" + name: "res4b8_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2b" + name: "res4b8_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "bn4b8_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "scale4b8_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2b" + bottom: "res4b8_branch2b" + name: "res4b8_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2c" + name: "res4b8_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "bn4b8_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "scale4b8_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7" + bottom: "res4b8_branch2c" + top: "res4b8" + name: "res4b8" + type: "Eltwise" +} + +layer { + bottom: "res4b8" + top: "res4b8" + name: "res4b8_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8" + top: "res4b9_branch2a" + name: "res4b9_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "bn4b9_branch2a" + type: 
"BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "scale4b9_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2a" + bottom: "res4b9_branch2a" + name: "res4b9_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2b" + name: "res4b9_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "bn4b9_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "scale4b9_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2b" + bottom: "res4b9_branch2b" + name: "res4b9_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2c" + name: "res4b9_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "bn4b9_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "scale4b9_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8" + bottom: "res4b9_branch2c" + top: "res4b9" + name: "res4b9" + type: "Eltwise" +} + +layer { + bottom: "res4b9" + top: "res4b9" + name: "res4b9_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9" + top: "res4b10_branch2a" + name: "res4b10_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "bn4b10_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "scale4b10_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2a" + bottom: "res4b10_branch2a" + name: "res4b10_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2b" + name: "res4b10_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 
+ bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "bn4b10_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "scale4b10_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2b" + bottom: "res4b10_branch2b" + name: "res4b10_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2c" + name: "res4b10_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "bn4b10_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "scale4b10_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9" + bottom: "res4b10_branch2c" + top: "res4b10" + name: "res4b10" + type: "Eltwise" +} + +layer { + bottom: "res4b10" + top: "res4b10" + name: "res4b10_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10" + top: "res4b11_branch2a" + name: "res4b11_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "bn4b11_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "scale4b11_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2a" + bottom: "res4b11_branch2a" + name: "res4b11_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2b" + name: "res4b11_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "bn4b11_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "scale4b11_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2b" + bottom: "res4b11_branch2b" + name: "res4b11_branch2b_relu" + type: "ReLU" +} + 
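Note on the blocks above: every res4b* unit repeats the same identity-bottleneck pattern, a 1x1/256 convolution, a 3x3/256 convolution with pad 1, and a 1x1/1024 convolution, each followed by a frozen BatchNorm (use_global_stats: true, lr_mult: 0) and an affine Scale layer, with the block input added back through an Eltwise sum before the final ReLU. As a minimal sketch of what one such unit computes — written in PyTorch purely for illustration, it is not part of this Caffe repo, and the input size is made up:

```python
# Illustration only (PyTorch, not part of this Caffe repo): the computation
# performed by one res4b* identity bottleneck as defined in the prototxt above.
import torch
import torch.nn as nn

class Bottleneck(nn.Module):
    def __init__(self, channels=1024, mid=256):
        super().__init__()
        # branch2a: 1x1/256, branch2b: 3x3/256 pad 1, branch2c: 1x1/1024, all bias-free
        self.branch2a = nn.Conv2d(channels, mid, kernel_size=1, bias=False)
        self.branch2b = nn.Conv2d(mid, mid, kernel_size=3, padding=1, bias=False)
        self.branch2c = nn.Conv2d(mid, channels, kernel_size=1, bias=False)
        # Frozen BatchNorm + Scale in the prototxt ~ BatchNorm2d kept in eval mode here
        self.bn2a = nn.BatchNorm2d(mid)
        self.bn2b = nn.BatchNorm2d(mid)
        self.bn2c = nn.BatchNorm2d(channels)
        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        out = self.relu(self.bn2a(self.branch2a(x)))
        out = self.relu(self.bn2b(self.branch2b(out)))
        out = self.bn2c(self.branch2c(out))
        return self.relu(out + x)   # Eltwise sum with the identity shortcut, then ReLU

block = Bottleneck().eval()              # eval() mirrors use_global_stats: true
y = block(torch.randn(1, 1024, 38, 50))  # shape is preserved: (1, 1024, 38, 50)
```

Because the spatial size and channel count are unchanged, the same unit can be chained res4b5, res4b6, ..., exactly as the prototxt does.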
+layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2c" + name: "res4b11_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "bn4b11_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "scale4b11_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10" + bottom: "res4b11_branch2c" + top: "res4b11" + name: "res4b11" + type: "Eltwise" +} + +layer { + bottom: "res4b11" + top: "res4b11" + name: "res4b11_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11" + top: "res4b12_branch2a" + name: "res4b12_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "bn4b12_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "scale4b12_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2a" + bottom: "res4b12_branch2a" + name: "res4b12_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2b" + name: "res4b12_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "bn4b12_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "scale4b12_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2b" + bottom: "res4b12_branch2b" + name: "res4b12_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2c" + name: "res4b12_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "bn4b12_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "scale4b12_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 
0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11" + bottom: "res4b12_branch2c" + top: "res4b12" + name: "res4b12" + type: "Eltwise" +} + +layer { + bottom: "res4b12" + top: "res4b12" + name: "res4b12_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12" + top: "res4b13_branch2a" + name: "res4b13_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "bn4b13_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "scale4b13_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2a" + bottom: "res4b13_branch2a" + name: "res4b13_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2b" + name: "res4b13_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "bn4b13_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "scale4b13_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2b" + bottom: "res4b13_branch2b" + name: "res4b13_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2c" + name: "res4b13_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "bn4b13_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "scale4b13_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12" + bottom: "res4b13_branch2c" + top: "res4b13" + name: "res4b13" + type: "Eltwise" +} + +layer { + bottom: "res4b13" + top: "res4b13" + name: "res4b13_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13" + top: "res4b14_branch2a" + name: "res4b14_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "bn4b14_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "scale4b14_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2a" + bottom: "res4b14_branch2a" + name: "res4b14_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2b" + name: "res4b14_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "bn4b14_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "scale4b14_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2b" + bottom: "res4b14_branch2b" + name: "res4b14_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2c" + name: "res4b14_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "bn4b14_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "scale4b14_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13" + bottom: "res4b14_branch2c" + top: "res4b14" + name: "res4b14" + type: "Eltwise" +} + +layer { + bottom: "res4b14" + top: "res4b14" + name: "res4b14_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14" + top: "res4b15_branch2a" + name: "res4b15_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "bn4b15_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "scale4b15_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2a" + bottom: "res4b15_branch2a" + name: "res4b15_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2b" + name: "res4b15_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + 
bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "bn4b15_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "scale4b15_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2b" + bottom: "res4b15_branch2b" + name: "res4b15_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2c" + name: "res4b15_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "bn4b15_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "scale4b15_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14" + bottom: "res4b15_branch2c" + top: "res4b15" + name: "res4b15" + type: "Eltwise" +} + +layer { + bottom: "res4b15" + top: "res4b15" + name: "res4b15_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15" + top: "res4b16_branch2a" + name: "res4b16_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "bn4b16_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "scale4b16_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2a" + bottom: "res4b16_branch2a" + name: "res4b16_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2b" + name: "res4b16_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "bn4b16_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "scale4b16_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2b" + bottom: "res4b16_branch2b" + name: "res4b16_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2c" + 
name: "res4b16_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "bn4b16_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "scale4b16_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15" + bottom: "res4b16_branch2c" + top: "res4b16" + name: "res4b16" + type: "Eltwise" +} + +layer { + bottom: "res4b16" + top: "res4b16" + name: "res4b16_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16" + top: "res4b17_branch2a" + name: "res4b17_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "bn4b17_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "scale4b17_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2a" + bottom: "res4b17_branch2a" + name: "res4b17_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2b" + name: "res4b17_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "bn4b17_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "scale4b17_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2b" + bottom: "res4b17_branch2b" + name: "res4b17_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2c" + name: "res4b17_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "bn4b17_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "scale4b17_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } +} + +layer { + bottom: "res4b16" + bottom: "res4b17_branch2c" + top: "res4b17" + name: "res4b17" + type: "Eltwise" +} + +layer { + bottom: "res4b17" + top: "res4b17" + name: "res4b17_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17" + top: "res4b18_branch2a" + name: "res4b18_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "bn4b18_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "scale4b18_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2a" + bottom: "res4b18_branch2a" + name: "res4b18_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2b" + name: "res4b18_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "bn4b18_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "scale4b18_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2b" + bottom: "res4b18_branch2b" + name: "res4b18_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2c" + name: "res4b18_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "bn4b18_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "scale4b18_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17" + bottom: "res4b18_branch2c" + top: "res4b18" + name: "res4b18" + type: "Eltwise" +} + +layer { + bottom: "res4b18" + top: "res4b18" + name: "res4b18_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18" + top: "res4b19_branch2a" + name: "res4b19_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "bn4b19_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + 
lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "scale4b19_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2a" + bottom: "res4b19_branch2a" + name: "res4b19_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2b" + name: "res4b19_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "bn4b19_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "scale4b19_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2b" + bottom: "res4b19_branch2b" + name: "res4b19_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2c" + name: "res4b19_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "bn4b19_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "scale4b19_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18" + bottom: "res4b19_branch2c" + top: "res4b19" + name: "res4b19" + type: "Eltwise" +} + +layer { + bottom: "res4b19" + top: "res4b19" + name: "res4b19_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19" + top: "res4b20_branch2a" + name: "res4b20_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "bn4b20_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "scale4b20_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2a" + bottom: "res4b20_branch2a" + name: "res4b20_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2b" + name: "res4b20_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: 
"bn4b20_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "scale4b20_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2b" + bottom: "res4b20_branch2b" + name: "res4b20_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2c" + name: "res4b20_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "bn4b20_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "scale4b20_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19" + bottom: "res4b20_branch2c" + top: "res4b20" + name: "res4b20" + type: "Eltwise" +} + +layer { + bottom: "res4b20" + top: "res4b20" + name: "res4b20_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20" + top: "res4b21_branch2a" + name: "res4b21_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "bn4b21_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "scale4b21_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2a" + bottom: "res4b21_branch2a" + name: "res4b21_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2b" + name: "res4b21_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "bn4b21_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "scale4b21_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2b" + bottom: "res4b21_branch2b" + name: "res4b21_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2c" + name: "res4b21_branch2c" + type: "Convolution" + 
convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "bn4b21_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "scale4b21_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20" + bottom: "res4b21_branch2c" + top: "res4b21" + name: "res4b21" + type: "Eltwise" +} + +layer { + bottom: "res4b21" + top: "res4b21" + name: "res4b21_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21" + top: "res4b22_branch2a" + name: "res4b22_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "bn4b22_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "scale4b22_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2a" + bottom: "res4b22_branch2a" + name: "res4b22_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2b" + name: "res4b22_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "bn4b22_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "scale4b22_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2b" + bottom: "res4b22_branch2b" + name: "res4b22_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2c" + name: "res4b22_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "bn4b22_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "scale4b22_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21" + bottom: 
"res4b22_branch2c" + top: "res4b22" + name: "res4b22" + type: "Eltwise" +} + +layer { + bottom: "res4b22" + top: "res4b22" + name: "res4b22_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2a" + bottom: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2b" + bottom: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: 
"res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2a" + bottom: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2b" + bottom: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + 
scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2a" + bottom: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2b" + bottom: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal 
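Note on the layers above: res5a_branch1 and res5a_branch2a use stride 1, and the 3x3 convolutions in res5a/res5b/res5c use dilation: 2 with pad: 2, so the conv5 stage keeps the conv4 spatial resolution and the backbone's effective stride stays at 16 (which is why the proposal layer below uses feat_stride 16 and the PSROIPooling layers use spatial_scale 0.0625 = 1/16). The RPN head sizes follow from 9 anchors per position, as the inline comments state, and the Reshape to shape { 0 2 -1 0 } regroups the 18 scores into (bg, fg) pairs so a 2-way Softmax can be applied. A small sanity check of that bookkeeping in plain Python; the feature-map size is a made-up example, and "3 scales x 3 ratios" is the usual default rather than something stated in this file:

```python
# RPN channel bookkeeping, taken from the prototxt comments above.
num_anchors = 9                       # typically 3 scales x 3 aspect ratios
assert 2 * num_anchors == 18          # rpn_cls_score: bg/fg score per anchor
assert 4 * num_anchors == 36          # rpn_bbox_pred: (dx, dy, dw, dh) per anchor

# rpn_cls_score_reshape, shape { 0 2 -1 0 }: (N, 18, H, W) -> (N, 2, 9*H, W),
# so Softmax runs over the two (bg, fg) channels; rpn_cls_prob_reshape reverses it.
N, H, W = 1, 38, 50                   # hypothetical feature-map size at stride 16
print((N, 2, num_anchors * H, W))     # -> (1, 2, 342, 50)
```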
============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 392 #8*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 8 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred_pre" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#-----------------------output------------------------ +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob_pre" +} + +layer { + name: "cls_prob_reshape" + type: "Reshape" + bottom: "cls_prob_pre" + top: "cls_prob" + reshape_param { + shape { + dim: -1 + dim: 21 + } + } +} + +layer { + name: "bbox_pred_reshape" + type: "Reshape" + bottom: "bbox_pred_pre" + top: "bbox_pred" + reshape_param { + shape { + dim: -1 + dim: 8 + } + } +} + + diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic.prototxt new file mode 100644 index 0000000..4580c66 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic.prototxt @@ -0,0 +1,7274 @@ +name: "ResNet-101" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 
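The channel counts in the R-FCN head above are all derived from three numbers: 21 classes (20 PASCAL VOC categories plus background), a 7x7 grid of position-sensitive bins (group_size: 7), and class-agnostic box regression, i.e. 4 coordinates for each of the two background/foreground hypotheses (output_dim: 8). Hence rfcn_cls produces 21 * 7^2 = 1029 score maps, rfcn_bbox produces 8 * 7^2 = 392, and spatial_scale: 0.0625 is simply 1/16, matching the proposal layer's feat_stride. A quick check of this arithmetic (illustrative Python only; only the variable names are mine):

```python
# Dimension bookkeeping for the position-sensitive R-FCN head above.
num_classes = 21            # 20 VOC classes + background (cls_prob reshape: dim 21)
group_size  = 7             # 7x7 position-sensitive bins
bbox_dims   = 4 * 2         # class-agnostic regression: 4 coords x {bg, fg}

assert num_classes * group_size ** 2 == 1029   # rfcn_cls num_output
assert bbox_dims   * group_size ** 2 == 392    # rfcn_bbox num_output
assert abs(0.0625 - 1.0 / 16) < 1e-12          # spatial_scale matches stride 16

# PSROIPooling emits (num_rois, 21, 7, 7) and (num_rois, 8, 7, 7); the 7x7 AVE
# pooling averages the bins into a single 21-d score / 8-d box delta per RoI,
# which the final Reshape layers flatten to (num_rois, 21) and (num_rois, 8).
```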
'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +# ------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "conv1" + bottom: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2a" + bottom: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2b" + 
bottom: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2a" + bottom: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2b" + bottom: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2a" + bottom: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2b" + bottom: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: 
"res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2a" + bottom: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2b" + bottom: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b1_branch2a" + name: "res3b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "bn3b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2a" + 
top: "res3b1_branch2a" + name: "scale3b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2a" + bottom: "res3b1_branch2a" + name: "res3b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2b" + name: "res3b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "bn3b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "scale3b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2b" + bottom: "res3b1_branch2b" + name: "res3b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2c" + name: "res3b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "bn3b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "scale3b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b1_branch2c" + top: "res3b1" + name: "res3b1" + type: "Eltwise" +} + +layer { + bottom: "res3b1" + top: "res3b1" + name: "res3b1_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1" + top: "res3b2_branch2a" + name: "res3b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "bn3b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "scale3b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2a" + bottom: "res3b2_branch2a" + name: "res3b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2b" + name: "res3b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "bn3b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "scale3b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2b" + bottom: "res3b2_branch2b" + name: "res3b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2c" + name: "res3b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "bn3b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "scale3b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1" + bottom: "res3b2_branch2c" + top: "res3b2" + name: "res3b2" + type: "Eltwise" +} + +layer { + bottom: "res3b2" + top: "res3b2" + name: "res3b2_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2" + top: "res3b3_branch2a" + name: "res3b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "bn3b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "scale3b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2a" + bottom: "res3b3_branch2a" + name: "res3b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2b" + name: "res3b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "bn3b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "scale3b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2b" + bottom: "res3b3_branch2b" + name: "res3b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2c" + name: "res3b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + 
name: "bn3b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "scale3b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2" + bottom: "res3b3_branch2c" + top: "res3b3" + name: "res3b3" + type: "Eltwise" +} + +layer { + bottom: "res3b3" + top: "res3b3" + name: "res3b3_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2a" + bottom: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2b" + bottom: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + 
type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b1_branch2a" + name: "res4b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "bn4b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "scale4b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2a" + bottom: "res4b1_branch2a" + name: "res4b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2b" + name: "res4b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "bn4b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "scale4b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2b" + bottom: "res4b1_branch2b" + name: "res4b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2c" + name: "res4b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "bn4b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "scale4b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b1_branch2c" + top: "res4b1" + name: "res4b1" + type: "Eltwise" +} + +layer { + bottom: "res4b1" + top: "res4b1" + name: "res4b1_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1" + top: "res4b2_branch2a" + name: "res4b2_branch2a" + type: "Convolution" + 
convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "bn4b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "scale4b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2a" + bottom: "res4b2_branch2a" + name: "res4b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2b" + name: "res4b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "bn4b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "scale4b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2b" + bottom: "res4b2_branch2b" + name: "res4b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2c" + name: "res4b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "bn4b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "scale4b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1" + bottom: "res4b2_branch2c" + top: "res4b2" + name: "res4b2" + type: "Eltwise" +} + +layer { + bottom: "res4b2" + top: "res4b2" + name: "res4b2_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2" + top: "res4b3_branch2a" + name: "res4b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "bn4b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "scale4b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2a" + bottom: "res4b3_branch2a" + name: 
"res4b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2b" + name: "res4b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "bn4b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "scale4b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2b" + bottom: "res4b3_branch2b" + name: "res4b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2c" + name: "res4b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "bn4b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "scale4b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2" + bottom: "res4b3_branch2c" + top: "res4b3" + name: "res4b3" + type: "Eltwise" +} + +layer { + bottom: "res4b3" + top: "res4b3" + name: "res4b3_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3" + top: "res4b4_branch2a" + name: "res4b4_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "bn4b4_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "scale4b4_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2a" + bottom: "res4b4_branch2a" + name: "res4b4_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2b" + name: "res4b4_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "bn4b4_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "scale4b4_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2b" + bottom: "res4b4_branch2b" + name: "res4b4_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2c" + name: "res4b4_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "bn4b4_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "scale4b4_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3" + bottom: "res4b4_branch2c" + top: "res4b4" + name: "res4b4" + type: "Eltwise" +} + +layer { + bottom: "res4b4" + top: "res4b4" + name: "res4b4_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4" + top: "res4b5_branch2a" + name: "res4b5_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "bn4b5_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "scale4b5_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2a" + bottom: "res4b5_branch2a" + name: "res4b5_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2b" + name: "res4b5_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "bn4b5_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "scale4b5_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2b" + bottom: "res4b5_branch2b" + name: "res4b5_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2c" + name: "res4b5_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "bn4b5_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: 
"res4b5_branch2c" + top: "res4b5_branch2c" + name: "scale4b5_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4" + bottom: "res4b5_branch2c" + top: "res4b5" + name: "res4b5" + type: "Eltwise" +} + +layer { + bottom: "res4b5" + top: "res4b5" + name: "res4b5_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5" + top: "res4b6_branch2a" + name: "res4b6_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "bn4b6_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "scale4b6_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2a" + bottom: "res4b6_branch2a" + name: "res4b6_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2b" + name: "res4b6_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "bn4b6_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "scale4b6_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2b" + bottom: "res4b6_branch2b" + name: "res4b6_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2c" + name: "res4b6_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "bn4b6_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "scale4b6_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5" + bottom: "res4b6_branch2c" + top: "res4b6" + name: "res4b6" + type: "Eltwise" +} + +layer { + bottom: "res4b6" + top: "res4b6" + name: "res4b6_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6" + top: "res4b7_branch2a" + name: "res4b7_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "bn4b7_branch2a" + type: "BatchNorm" 
+ batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "scale4b7_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2a" + bottom: "res4b7_branch2a" + name: "res4b7_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2b" + name: "res4b7_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "bn4b7_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "scale4b7_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2b" + bottom: "res4b7_branch2b" + name: "res4b7_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2c" + name: "res4b7_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "bn4b7_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "scale4b7_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6" + bottom: "res4b7_branch2c" + top: "res4b7" + name: "res4b7" + type: "Eltwise" +} + +layer { + bottom: "res4b7" + top: "res4b7" + name: "res4b7_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7" + top: "res4b8_branch2a" + name: "res4b8_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "bn4b8_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "scale4b8_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2a" + bottom: "res4b8_branch2a" + name: "res4b8_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2b" + name: "res4b8_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + 
param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "bn4b8_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "scale4b8_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2b" + bottom: "res4b8_branch2b" + name: "res4b8_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2c" + name: "res4b8_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "bn4b8_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "scale4b8_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7" + bottom: "res4b8_branch2c" + top: "res4b8" + name: "res4b8" + type: "Eltwise" +} + +layer { + bottom: "res4b8" + top: "res4b8" + name: "res4b8_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8" + top: "res4b9_branch2a" + name: "res4b9_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "bn4b9_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "scale4b9_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2a" + bottom: "res4b9_branch2a" + name: "res4b9_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2b" + name: "res4b9_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "bn4b9_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "scale4b9_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2b" + bottom: "res4b9_branch2b" + name: "res4b9_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2c" + name: 
"res4b9_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "bn4b9_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "scale4b9_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8" + bottom: "res4b9_branch2c" + top: "res4b9" + name: "res4b9" + type: "Eltwise" +} + +layer { + bottom: "res4b9" + top: "res4b9" + name: "res4b9_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9" + top: "res4b10_branch2a" + name: "res4b10_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "bn4b10_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "scale4b10_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2a" + bottom: "res4b10_branch2a" + name: "res4b10_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2b" + name: "res4b10_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "bn4b10_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "scale4b10_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2b" + bottom: "res4b10_branch2b" + name: "res4b10_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2c" + name: "res4b10_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "bn4b10_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "scale4b10_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + 
bottom: "res4b9" + bottom: "res4b10_branch2c" + top: "res4b10" + name: "res4b10" + type: "Eltwise" +} + +layer { + bottom: "res4b10" + top: "res4b10" + name: "res4b10_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10" + top: "res4b11_branch2a" + name: "res4b11_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "bn4b11_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "scale4b11_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2a" + bottom: "res4b11_branch2a" + name: "res4b11_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2b" + name: "res4b11_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "bn4b11_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "scale4b11_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2b" + bottom: "res4b11_branch2b" + name: "res4b11_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2c" + name: "res4b11_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "bn4b11_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "scale4b11_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10" + bottom: "res4b11_branch2c" + top: "res4b11" + name: "res4b11" + type: "Eltwise" +} + +layer { + bottom: "res4b11" + top: "res4b11" + name: "res4b11_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11" + top: "res4b12_branch2a" + name: "res4b12_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "bn4b12_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 
0.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "scale4b12_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2a" + bottom: "res4b12_branch2a" + name: "res4b12_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2b" + name: "res4b12_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "bn4b12_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "scale4b12_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2b" + bottom: "res4b12_branch2b" + name: "res4b12_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2c" + name: "res4b12_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "bn4b12_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "scale4b12_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11" + bottom: "res4b12_branch2c" + top: "res4b12" + name: "res4b12" + type: "Eltwise" +} + +layer { + bottom: "res4b12" + top: "res4b12" + name: "res4b12_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12" + top: "res4b13_branch2a" + name: "res4b13_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "bn4b13_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "scale4b13_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2a" + bottom: "res4b13_branch2a" + name: "res4b13_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2b" + name: "res4b13_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "bn4b13_branch2b" + type: 
"BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "scale4b13_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2b" + bottom: "res4b13_branch2b" + name: "res4b13_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2c" + name: "res4b13_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "bn4b13_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "scale4b13_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12" + bottom: "res4b13_branch2c" + top: "res4b13" + name: "res4b13" + type: "Eltwise" +} + +layer { + bottom: "res4b13" + top: "res4b13" + name: "res4b13_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13" + top: "res4b14_branch2a" + name: "res4b14_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "bn4b14_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "scale4b14_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2a" + bottom: "res4b14_branch2a" + name: "res4b14_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2b" + name: "res4b14_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "bn4b14_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "scale4b14_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2b" + bottom: "res4b14_branch2b" + name: "res4b14_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2c" + name: "res4b14_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + 
kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "bn4b14_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "scale4b14_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13" + bottom: "res4b14_branch2c" + top: "res4b14" + name: "res4b14" + type: "Eltwise" +} + +layer { + bottom: "res4b14" + top: "res4b14" + name: "res4b14_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14" + top: "res4b15_branch2a" + name: "res4b15_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "bn4b15_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "scale4b15_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2a" + bottom: "res4b15_branch2a" + name: "res4b15_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2b" + name: "res4b15_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "bn4b15_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "scale4b15_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2b" + bottom: "res4b15_branch2b" + name: "res4b15_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2c" + name: "res4b15_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "bn4b15_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "scale4b15_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14" + bottom: "res4b15_branch2c" + top: "res4b15" + 
name: "res4b15" + type: "Eltwise" +} + +layer { + bottom: "res4b15" + top: "res4b15" + name: "res4b15_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15" + top: "res4b16_branch2a" + name: "res4b16_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "bn4b16_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "scale4b16_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2a" + bottom: "res4b16_branch2a" + name: "res4b16_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2b" + name: "res4b16_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "bn4b16_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "scale4b16_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2b" + bottom: "res4b16_branch2b" + name: "res4b16_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2c" + name: "res4b16_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "bn4b16_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "scale4b16_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15" + bottom: "res4b16_branch2c" + top: "res4b16" + name: "res4b16" + type: "Eltwise" +} + +layer { + bottom: "res4b16" + top: "res4b16" + name: "res4b16_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16" + top: "res4b17_branch2a" + name: "res4b17_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "bn4b17_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: 
"res4b17_branch2a" + name: "scale4b17_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2a" + bottom: "res4b17_branch2a" + name: "res4b17_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2b" + name: "res4b17_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "bn4b17_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "scale4b17_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2b" + bottom: "res4b17_branch2b" + name: "res4b17_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2c" + name: "res4b17_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "bn4b17_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "scale4b17_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16" + bottom: "res4b17_branch2c" + top: "res4b17" + name: "res4b17" + type: "Eltwise" +} + +layer { + bottom: "res4b17" + top: "res4b17" + name: "res4b17_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17" + top: "res4b18_branch2a" + name: "res4b18_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "bn4b18_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "scale4b18_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2a" + bottom: "res4b18_branch2a" + name: "res4b18_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2b" + name: "res4b18_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "bn4b18_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } 
+ param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "scale4b18_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2b" + bottom: "res4b18_branch2b" + name: "res4b18_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2c" + name: "res4b18_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "bn4b18_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "scale4b18_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17" + bottom: "res4b18_branch2c" + top: "res4b18" + name: "res4b18" + type: "Eltwise" +} + +layer { + bottom: "res4b18" + top: "res4b18" + name: "res4b18_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18" + top: "res4b19_branch2a" + name: "res4b19_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "bn4b19_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "scale4b19_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2a" + bottom: "res4b19_branch2a" + name: "res4b19_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2b" + name: "res4b19_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "bn4b19_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "scale4b19_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2b" + bottom: "res4b19_branch2b" + name: "res4b19_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2c" + name: "res4b19_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + 
param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "bn4b19_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "scale4b19_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18" + bottom: "res4b19_branch2c" + top: "res4b19" + name: "res4b19" + type: "Eltwise" +} + +layer { + bottom: "res4b19" + top: "res4b19" + name: "res4b19_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19" + top: "res4b20_branch2a" + name: "res4b20_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "bn4b20_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "scale4b20_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2a" + bottom: "res4b20_branch2a" + name: "res4b20_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2b" + name: "res4b20_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "bn4b20_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "scale4b20_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2b" + bottom: "res4b20_branch2b" + name: "res4b20_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2c" + name: "res4b20_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "bn4b20_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "scale4b20_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19" + bottom: "res4b20_branch2c" + top: "res4b20" + name: "res4b20" + type: "Eltwise" +} + +layer { + bottom: 
"res4b20" + top: "res4b20" + name: "res4b20_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20" + top: "res4b21_branch2a" + name: "res4b21_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "bn4b21_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "scale4b21_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2a" + bottom: "res4b21_branch2a" + name: "res4b21_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2b" + name: "res4b21_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "bn4b21_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "scale4b21_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2b" + bottom: "res4b21_branch2b" + name: "res4b21_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2c" + name: "res4b21_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "bn4b21_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "scale4b21_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20" + bottom: "res4b21_branch2c" + top: "res4b21" + name: "res4b21" + type: "Eltwise" +} + +layer { + bottom: "res4b21" + top: "res4b21" + name: "res4b21_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21" + top: "res4b22_branch2a" + name: "res4b22_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "bn4b22_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "scale4b22_branch2a" + type: "Scale" + 
scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2a" + bottom: "res4b22_branch2a" + name: "res4b22_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2b" + name: "res4b22_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "bn4b22_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "scale4b22_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2b" + bottom: "res4b22_branch2b" + name: "res4b22_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2c" + name: "res4b22_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "bn4b22_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "scale4b22_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21" + bottom: "res4b22_branch2c" + top: "res4b22" + name: "res4b22" + type: "Eltwise" +} + +layer { + bottom: "res4b22" + top: "res4b22" + name: "res4b22_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: 
"scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2a" + bottom: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2b" + bottom: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2a" + bottom: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + 
} + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2b" + bottom: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2a" + bottom: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2b" + bottom: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: 
true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' 
+# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 2" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 392 #8*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 8 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#-----------------------output------------------------ +layer { + name: "loss" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + top: "loss_cls" + loss_weight: 1 + propagate_down: true + propagate_down: false +} + +layer { + name: "accuarcy" + type: "Accuracy" + bottom: "cls_score" + bottom: "labels" + top: "accuarcy" + #include: { phase: TEST } + propagate_down: false + propagate_down: false +} + +layer { + name: "loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: 'bbox_inside_weights' + top: "loss_bbox" + loss_weight: 1 + loss_param { + normalization: PRE_FIXED + pre_fixed_normalizer: 128 + } + propagate_down: true + propagate_down: false + propagate_down: false +} + +layer { + name: "silence" + type: "Silence" + bottom: "bbox_outside_weights" +} + diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt new file mode 100644 index 0000000..2c43883 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt 
@@ -0,0 +1,7344 @@ +name: "ResNet-101" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +# ------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "conv1" + bottom: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2a" + bottom: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param 
{ + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2b" + bottom: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2a" + bottom: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2b" + bottom: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: 
"res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2a" + bottom: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2b" + bottom: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + 
lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2a" + bottom: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2b" + bottom: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b1_branch2a" + name: "res3b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "bn3b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "scale3b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2a" + bottom: "res3b1_branch2a" + name: "res3b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2b" + name: "res3b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "bn3b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "scale3b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2b" + bottom: "res3b1_branch2b" + name: "res3b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2c" + name: "res3b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "bn3b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "scale3b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b1_branch2c" + top: "res3b1" + name: "res3b1" + type: "Eltwise" +} + +layer { + bottom: "res3b1" + top: "res3b1" + name: "res3b1_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1" + top: "res3b2_branch2a" + name: "res3b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "bn3b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "scale3b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2a" + bottom: "res3b2_branch2a" + name: "res3b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2b" + name: "res3b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: 
"res3b2_branch2b" + name: "bn3b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "scale3b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2b" + bottom: "res3b2_branch2b" + name: "res3b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2c" + name: "res3b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "bn3b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "scale3b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1" + bottom: "res3b2_branch2c" + top: "res3b2" + name: "res3b2" + type: "Eltwise" +} + +layer { + bottom: "res3b2" + top: "res3b2" + name: "res3b2_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2" + top: "res3b3_branch2a" + name: "res3b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "bn3b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "scale3b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2a" + bottom: "res3b3_branch2a" + name: "res3b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2b" + name: "res3b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "bn3b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "scale3b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2b" + bottom: "res3b3_branch2b" + name: "res3b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2c" + name: "res3b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + 
kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "bn3b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "scale3b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2" + bottom: "res3b3_branch2c" + top: "res3b3" + name: "res3b3" + type: "Eltwise" +} + +layer { + bottom: "res3b3" + top: "res3b3" + name: "res3b3_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2a" + bottom: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2b" + bottom: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 
+ stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b1_branch2a" + name: "res4b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "bn4b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "scale4b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2a" + bottom: "res4b1_branch2a" + name: "res4b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2b" + name: "res4b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "bn4b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "scale4b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2b" + bottom: "res4b1_branch2b" + name: "res4b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2c" + name: "res4b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "bn4b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "scale4b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b1_branch2c" + top: "res4b1" + name: "res4b1" + type: "Eltwise" +} + +layer { + bottom: "res4b1" + top: "res4b1" + 
name: "res4b1_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1" + top: "res4b2_branch2a" + name: "res4b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "bn4b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "scale4b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2a" + bottom: "res4b2_branch2a" + name: "res4b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2b" + name: "res4b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "bn4b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "scale4b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2b" + bottom: "res4b2_branch2b" + name: "res4b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2c" + name: "res4b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "bn4b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "scale4b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1" + bottom: "res4b2_branch2c" + top: "res4b2" + name: "res4b2" + type: "Eltwise" +} + +layer { + bottom: "res4b2" + top: "res4b2" + name: "res4b2_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2" + top: "res4b3_branch2a" + name: "res4b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "bn4b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "scale4b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2a" + bottom: "res4b3_branch2a" + name: "res4b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2b" + name: "res4b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "bn4b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "scale4b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2b" + bottom: "res4b3_branch2b" + name: "res4b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2c" + name: "res4b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "bn4b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "scale4b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2" + bottom: "res4b3_branch2c" + top: "res4b3" + name: "res4b3" + type: "Eltwise" +} + +layer { + bottom: "res4b3" + top: "res4b3" + name: "res4b3_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3" + top: "res4b4_branch2a" + name: "res4b4_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "bn4b4_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "scale4b4_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2a" + bottom: "res4b4_branch2a" + name: "res4b4_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2b" + name: "res4b4_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "bn4b4_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: 
"res4b4_branch2b" + name: "scale4b4_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2b" + bottom: "res4b4_branch2b" + name: "res4b4_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2c" + name: "res4b4_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "bn4b4_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "scale4b4_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3" + bottom: "res4b4_branch2c" + top: "res4b4" + name: "res4b4" + type: "Eltwise" +} + +layer { + bottom: "res4b4" + top: "res4b4" + name: "res4b4_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4" + top: "res4b5_branch2a" + name: "res4b5_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "bn4b5_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "scale4b5_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2a" + bottom: "res4b5_branch2a" + name: "res4b5_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2b" + name: "res4b5_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "bn4b5_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "scale4b5_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2b" + bottom: "res4b5_branch2b" + name: "res4b5_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2c" + name: "res4b5_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "bn4b5_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "scale4b5_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4" + bottom: "res4b5_branch2c" + top: "res4b5" + name: "res4b5" + type: "Eltwise" +} + +layer { + bottom: "res4b5" + top: "res4b5" + name: "res4b5_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5" + top: "res4b6_branch2a" + name: "res4b6_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "bn4b6_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "scale4b6_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2a" + bottom: "res4b6_branch2a" + name: "res4b6_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2b" + name: "res4b6_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "bn4b6_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "scale4b6_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2b" + bottom: "res4b6_branch2b" + name: "res4b6_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2c" + name: "res4b6_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "bn4b6_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "scale4b6_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5" + bottom: "res4b6_branch2c" + top: "res4b6" + name: "res4b6" + type: "Eltwise" +} + +layer { + bottom: "res4b6" + top: "res4b6" + name: "res4b6_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6" + top: "res4b7_branch2a" + name: "res4b7_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 
+ } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "bn4b7_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "scale4b7_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2a" + bottom: "res4b7_branch2a" + name: "res4b7_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2b" + name: "res4b7_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "bn4b7_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "scale4b7_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2b" + bottom: "res4b7_branch2b" + name: "res4b7_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2c" + name: "res4b7_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "bn4b7_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "scale4b7_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6" + bottom: "res4b7_branch2c" + top: "res4b7" + name: "res4b7" + type: "Eltwise" +} + +layer { + bottom: "res4b7" + top: "res4b7" + name: "res4b7_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7" + top: "res4b8_branch2a" + name: "res4b8_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "bn4b8_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "scale4b8_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2a" + bottom: "res4b8_branch2a" + name: "res4b8_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2b" + name: "res4b8_branch2b" + type: 
"Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "bn4b8_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "scale4b8_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2b" + bottom: "res4b8_branch2b" + name: "res4b8_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2c" + name: "res4b8_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "bn4b8_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "scale4b8_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7" + bottom: "res4b8_branch2c" + top: "res4b8" + name: "res4b8" + type: "Eltwise" +} + +layer { + bottom: "res4b8" + top: "res4b8" + name: "res4b8_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8" + top: "res4b9_branch2a" + name: "res4b9_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "bn4b9_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "scale4b9_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2a" + bottom: "res4b9_branch2a" + name: "res4b9_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2b" + name: "res4b9_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "bn4b9_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "scale4b9_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2b" + bottom: "res4b9_branch2b" + name: 
"res4b9_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2c" + name: "res4b9_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "bn4b9_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "scale4b9_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8" + bottom: "res4b9_branch2c" + top: "res4b9" + name: "res4b9" + type: "Eltwise" +} + +layer { + bottom: "res4b9" + top: "res4b9" + name: "res4b9_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9" + top: "res4b10_branch2a" + name: "res4b10_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "bn4b10_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "scale4b10_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2a" + bottom: "res4b10_branch2a" + name: "res4b10_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2b" + name: "res4b10_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "bn4b10_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "scale4b10_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2b" + bottom: "res4b10_branch2b" + name: "res4b10_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2c" + name: "res4b10_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "bn4b10_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "scale4b10_branch2c" + type: "Scale" + scale_param { + bias_term: true + 
} + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9" + bottom: "res4b10_branch2c" + top: "res4b10" + name: "res4b10" + type: "Eltwise" +} + +layer { + bottom: "res4b10" + top: "res4b10" + name: "res4b10_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10" + top: "res4b11_branch2a" + name: "res4b11_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "bn4b11_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "scale4b11_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2a" + bottom: "res4b11_branch2a" + name: "res4b11_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2b" + name: "res4b11_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "bn4b11_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "scale4b11_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2b" + bottom: "res4b11_branch2b" + name: "res4b11_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2c" + name: "res4b11_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "bn4b11_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "scale4b11_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10" + bottom: "res4b11_branch2c" + top: "res4b11" + name: "res4b11" + type: "Eltwise" +} + +layer { + bottom: "res4b11" + top: "res4b11" + name: "res4b11_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11" + top: "res4b12_branch2a" + name: "res4b12_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "bn4b12_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 
0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "scale4b12_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2a" + bottom: "res4b12_branch2a" + name: "res4b12_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2b" + name: "res4b12_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "bn4b12_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "scale4b12_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2b" + bottom: "res4b12_branch2b" + name: "res4b12_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2c" + name: "res4b12_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "bn4b12_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "scale4b12_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11" + bottom: "res4b12_branch2c" + top: "res4b12" + name: "res4b12" + type: "Eltwise" +} + +layer { + bottom: "res4b12" + top: "res4b12" + name: "res4b12_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12" + top: "res4b13_branch2a" + name: "res4b13_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "bn4b13_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "scale4b13_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2a" + bottom: "res4b13_branch2a" + name: "res4b13_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2b" + name: "res4b13_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 
+ } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "bn4b13_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "scale4b13_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2b" + bottom: "res4b13_branch2b" + name: "res4b13_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2c" + name: "res4b13_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "bn4b13_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "scale4b13_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12" + bottom: "res4b13_branch2c" + top: "res4b13" + name: "res4b13" + type: "Eltwise" +} + +layer { + bottom: "res4b13" + top: "res4b13" + name: "res4b13_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13" + top: "res4b14_branch2a" + name: "res4b14_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "bn4b14_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "scale4b14_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2a" + bottom: "res4b14_branch2a" + name: "res4b14_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2b" + name: "res4b14_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "bn4b14_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "scale4b14_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2b" + bottom: "res4b14_branch2b" + name: "res4b14_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2b" + top: 
"res4b14_branch2c" + name: "res4b14_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "bn4b14_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "scale4b14_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13" + bottom: "res4b14_branch2c" + top: "res4b14" + name: "res4b14" + type: "Eltwise" +} + +layer { + bottom: "res4b14" + top: "res4b14" + name: "res4b14_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14" + top: "res4b15_branch2a" + name: "res4b15_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "bn4b15_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "scale4b15_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2a" + bottom: "res4b15_branch2a" + name: "res4b15_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2b" + name: "res4b15_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "bn4b15_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "scale4b15_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2b" + bottom: "res4b15_branch2b" + name: "res4b15_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2c" + name: "res4b15_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "bn4b15_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "scale4b15_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 
0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14" + bottom: "res4b15_branch2c" + top: "res4b15" + name: "res4b15" + type: "Eltwise" +} + +layer { + bottom: "res4b15" + top: "res4b15" + name: "res4b15_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15" + top: "res4b16_branch2a" + name: "res4b16_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "bn4b16_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "scale4b16_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2a" + bottom: "res4b16_branch2a" + name: "res4b16_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2b" + name: "res4b16_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "bn4b16_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "scale4b16_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2b" + bottom: "res4b16_branch2b" + name: "res4b16_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2c" + name: "res4b16_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "bn4b16_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "scale4b16_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15" + bottom: "res4b16_branch2c" + top: "res4b16" + name: "res4b16" + type: "Eltwise" +} + +layer { + bottom: "res4b16" + top: "res4b16" + name: "res4b16_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16" + top: "res4b17_branch2a" + name: "res4b17_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "bn4b17_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "scale4b17_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2a" + bottom: "res4b17_branch2a" + name: "res4b17_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2b" + name: "res4b17_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "bn4b17_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "scale4b17_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2b" + bottom: "res4b17_branch2b" + name: "res4b17_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2c" + name: "res4b17_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "bn4b17_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "scale4b17_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16" + bottom: "res4b17_branch2c" + top: "res4b17" + name: "res4b17" + type: "Eltwise" +} + +layer { + bottom: "res4b17" + top: "res4b17" + name: "res4b17_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17" + top: "res4b18_branch2a" + name: "res4b18_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "bn4b18_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "scale4b18_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2a" + bottom: "res4b18_branch2a" + name: "res4b18_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2b" + name: "res4b18_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: 
"res4b18_branch2b" + name: "bn4b18_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "scale4b18_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2b" + bottom: "res4b18_branch2b" + name: "res4b18_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2c" + name: "res4b18_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "bn4b18_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "scale4b18_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17" + bottom: "res4b18_branch2c" + top: "res4b18" + name: "res4b18" + type: "Eltwise" +} + +layer { + bottom: "res4b18" + top: "res4b18" + name: "res4b18_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18" + top: "res4b19_branch2a" + name: "res4b19_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "bn4b19_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "scale4b19_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2a" + bottom: "res4b19_branch2a" + name: "res4b19_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2b" + name: "res4b19_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "bn4b19_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "scale4b19_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2b" + bottom: "res4b19_branch2b" + name: "res4b19_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2c" + name: "res4b19_branch2c" + type: 
"Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "bn4b19_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "scale4b19_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18" + bottom: "res4b19_branch2c" + top: "res4b19" + name: "res4b19" + type: "Eltwise" +} + +layer { + bottom: "res4b19" + top: "res4b19" + name: "res4b19_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19" + top: "res4b20_branch2a" + name: "res4b20_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "bn4b20_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "scale4b20_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2a" + bottom: "res4b20_branch2a" + name: "res4b20_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2b" + name: "res4b20_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "bn4b20_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "scale4b20_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2b" + bottom: "res4b20_branch2b" + name: "res4b20_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2c" + name: "res4b20_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "bn4b20_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "scale4b20_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: 
"res4b19" + bottom: "res4b20_branch2c" + top: "res4b20" + name: "res4b20" + type: "Eltwise" +} + +layer { + bottom: "res4b20" + top: "res4b20" + name: "res4b20_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20" + top: "res4b21_branch2a" + name: "res4b21_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "bn4b21_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "scale4b21_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2a" + bottom: "res4b21_branch2a" + name: "res4b21_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2b" + name: "res4b21_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "bn4b21_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "scale4b21_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2b" + bottom: "res4b21_branch2b" + name: "res4b21_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2c" + name: "res4b21_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "bn4b21_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "scale4b21_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20" + bottom: "res4b21_branch2c" + top: "res4b21" + name: "res4b21" + type: "Eltwise" +} + +layer { + bottom: "res4b21" + top: "res4b21" + name: "res4b21_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21" + top: "res4b22_branch2a" + name: "res4b22_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "bn4b22_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } 
+} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "scale4b22_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2a" + bottom: "res4b22_branch2a" + name: "res4b22_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2b" + name: "res4b22_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "bn4b22_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "scale4b22_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2b" + bottom: "res4b22_branch2b" + name: "res4b22_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2c" + name: "res4b22_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "bn4b22_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "scale4b22_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21" + bottom: "res4b22_branch2c" + top: "res4b22" + name: "res4b22" + type: "Eltwise" +} + +layer { + bottom: "res4b22" + top: "res4b22" + name: "res4b22_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + 
lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2a" + bottom: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2b" + bottom: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2a" + bottom: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + 
use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2b" + bottom: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2a" + bottom: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2b" + bottom: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: 
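+# Note on the res5 stage above: the 3x3 convolutions res5a/b/c_branch2b use
+# dilation: 2 with pad: 2 and stride: 1, so the feature map is not
+# downsampled again after res4 and the backbone ends at an effective stride
+# of 16. The RPN's 'feat_stride': 16 and the PSROIPooling
+# spatial_scale: 0.0625 (= 1/16) further down are presumably chosen to
+# match this.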
"res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + + + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# 
type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 2" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 392 #8*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 8 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#--------------online hard example mining-------------- +layer { + name: "per_roi_loss_cls" + type: "SoftmaxWithLossOHEM" + bottom: "cls_score" + bottom: "labels" + top: "temp_loss_cls" + top: "temp_prob_cls" + top: "per_roi_loss_cls" + loss_weight: 0 + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + top: "temp_loss_bbox" + top: "per_roi_loss_bbox" + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss" + type: "Eltwise" + bottom: "per_roi_loss_cls" + bottom: "per_roi_loss_bbox" + top: "per_roi_loss" + propagate_down: false + propagate_down: false +} + +layer { + bottom: "rois" + bottom: "per_roi_loss" + bottom: "labels" + bottom: "bbox_inside_weights" + top: "labels_ohem" + top: "bbox_loss_weights_ohem" + name: 
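+# Notes on the R-FCN head above: rfcn_cls produces 1029 = 21 x 7 x 7
+# position-sensitive score maps (cls_num x group_size^2) and rfcn_bbox
+# produces 392 = 8 x 7 x 7, where 8 is presumably 4 box coordinates x 2
+# classes, in line with 'num_classes': 2 in roi-data. Both PSROIPooling
+# layers use spatial_scale: 0.0625 = 1/16 (the backbone stride), and the
+# 7x7 AVE pooling collapses each RoI to a single score/offset vector.
+# One thing worth double-checking: rfcn_cls and psroipooled_cls_rois are
+# still sized for 21 classes while the rest of the head assumes 2.
+# The OHEM branch computes per-RoI losses with loss_weight: 0, sums them
+# via Eltwise, and the BoxAnnotatorOHEM layer keeps the roi_per_img: 128
+# hardest RoIs; only those contribute to the final loss_cls / loss_bbox,
+# with the bbox loss normalized by pre_fixed_normalizer: 128.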
"annotator_detector" + type: "BoxAnnotatorOHEM" + box_annotator_ohem_param { + roi_per_img: 128 + ignore_label: -1 + } + propagate_down: false + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "silence" + type: "Silence" + bottom: "bbox_outside_weights" + bottom: "temp_loss_cls" + bottom: "temp_prob_cls" + bottom: "temp_loss_bbox" +} + +#-----------------------output------------------------ +layer { + name: "loss" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels_ohem" + top: "loss_cls" + loss_weight: 1 + loss_param { + ignore_label: -1 + } + propagate_down: true + propagate_down: false +} + +layer { + name: "accuarcy" + type: "Accuracy" + bottom: "cls_score" + bottom: "labels_ohem" + top: "accuarcy" + #include: { phase: TEST } + accuracy_param { + ignore_label: -1 + } + propagate_down: false + propagate_down: false +} + +layer { + name: "loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_loss_weights_ohem" + top: "loss_bbox" + loss_weight: 1 + loss_param { + normalization: PRE_FIXED + pre_fixed_normalizer: 128 + } + propagate_down: true + propagate_down: false + propagate_down: false +} + diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/class-aware/test.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/class-aware/test.prototxt new file mode 100644 index 0000000..83b223c --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/class-aware/test.prototxt @@ -0,0 +1,3787 @@ +name: "ResNet50" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + } + param { + lr_mult: 0.0 + } + param { + lr_mult: 0.0 + } + +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + 
bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: 
"Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + 
bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: 
"res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b_branch2a" + name: "res3b_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "bn3b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "scale3b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "res3b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2b" + name: "res3b_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "bn3b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "scale3b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "res3b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2c" + name: "res3b_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "bn3b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "scale3b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b_branch2c" + 
top: "res3b" + name: "res3b" + type: "Eltwise" +} + +layer { + bottom: "res3b" + top: "res3b" + name: "res3b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b" + top: "res3c_branch2a" + name: "res3c_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "bn3c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "scale3c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "res3c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2b" + name: "res3c_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "bn3c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "scale3c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "res3c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2c" + name: "res3c_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "bn3c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "scale3c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b" + bottom: "res3c_branch2c" + top: "res3c" + name: "res3c" + type: "Eltwise" +} + +layer { + bottom: "res3c" + top: "res3c" + name: "res3c_relu" + type: "ReLU" +} + +layer { + bottom: "res3c" + top: "res3d_branch2a" + name: "res3d_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "bn3d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "scale3d_branch2a" + type: "Scale" + scale_param { + bias_term: true + 
} + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "res3d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2b" + name: "res3d_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "bn3d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "scale3d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "res3d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2c" + name: "res3d_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "bn3d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "scale3d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c" + bottom: "res3d_branch2c" + top: "res3d" + name: "res3d" + type: "Eltwise" +} + +layer { + bottom: "res3d" + top: "res3d" + name: "res3d_relu" + type: "ReLU" +} + +layer { + bottom: "res3d" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 
0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b_branch2a" + name: "res4b_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "bn4b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "scale4b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "res4b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2b" + name: "res4b_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "bn4b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: 
"scale4b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "res4b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2c" + name: "res4b_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "bn4b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "scale4b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b_branch2c" + top: "res4b" + name: "res4b" + type: "Eltwise" +} + +layer { + bottom: "res4b" + top: "res4b" + name: "res4b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b" + top: "res4c_branch2a" + name: "res4c_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "bn4c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "scale4c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "res4c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2b" + name: "res4c_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "bn4c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "scale4c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "res4c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2c" + name: "res4c_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "bn4c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "scale4c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b" + bottom: "res4c_branch2c" + top: "res4c" + name: "res4c" + type: "Eltwise" +} + +layer { + bottom: "res4c" + top: "res4c" + name: "res4c_relu" + type: "ReLU" +} + +layer { + bottom: "res4c" + top: "res4d_branch2a" + name: "res4d_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "bn4d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "scale4d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "res4d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2b" + name: "res4d_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "bn4d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "scale4d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "res4d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2c" + name: "res4d_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "bn4d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "scale4d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c" + bottom: "res4d_branch2c" + top: "res4d" + name: "res4d" + type: "Eltwise" +} + +layer { + bottom: "res4d" + top: "res4d" + name: "res4d_relu" + type: "ReLU" +} + +layer { + bottom: "res4d" + top: "res4e_branch2a" + name: "res4e_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "bn4e_branch2a" + type: "BatchNorm" + 
batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "scale4e_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "res4e_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2b" + name: "res4e_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "bn4e_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "scale4e_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "res4e_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2c" + name: "res4e_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "bn4e_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "scale4e_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d" + bottom: "res4e_branch2c" + top: "res4e" + name: "res4e" + type: "Eltwise" +} + +layer { + bottom: "res4e" + top: "res4e" + name: "res4e_relu" + type: "ReLU" +} + +layer { + bottom: "res4e" + top: "res4f_branch2a" + name: "res4f_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "bn4f_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "scale4f_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "res4f_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2b" + name: "res4f_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: 
"res4f_branch2b" + top: "res4f_branch2b" + name: "bn4f_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "scale4f_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "res4f_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2c" + name: "res4f_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "bn4f_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "scale4f_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e" + bottom: "res4f_branch2c" + top: "res4f" + name: "res4f" + type: "Eltwise" +} + +layer { + bottom: "res4f" + top: "res4f" + name: "res4f_relu" + type: "ReLU" +} + +layer { + bottom: "res4f" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: 
"res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + 
stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: 
"ReLU" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 4116 #8*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: 
"ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 84 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred_pre" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#-----------------------output------------------------ +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob_pre" +} + +layer { + name: "cls_prob_reshape" + type: "Reshape" + bottom: "cls_prob_pre" + top: "cls_prob" + reshape_param { + shape { + dim: -1 + dim: 21 + } + } +} + +layer { + name: "bbox_pred_reshape" + type: "Reshape" + bottom: "bbox_pred_pre" + top: "bbox_pred" + reshape_param { + shape { + dim: -1 + dim: 84 + } + } +} + + diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/class-aware/train_ohem.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/class-aware/train_ohem.prototxt new file mode 100644 index 0000000..f709103 --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/class-aware/train_ohem.prototxt @@ -0,0 +1,3946 @@ +name: "ResNet-50" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +# ------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + } + param { + lr_mult: 0.0 + } + param { + lr_mult: 0.0 + } + +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + 
bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 
3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: 
"res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + 
convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b_branch2a" + name: "res3b_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "bn3b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "scale3b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "res3b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2b" + name: "res3b_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "bn3b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "scale3b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "res3b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2c" + name: "res3b_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "bn3b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "scale3b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b_branch2c" + top: "res3b" + name: "res3b" + type: "Eltwise" +} + +layer { + 
bottom: "res3b" + top: "res3b" + name: "res3b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b" + top: "res3c_branch2a" + name: "res3c_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "bn3c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "scale3c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "res3c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2b" + name: "res3c_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "bn3c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "scale3c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "res3c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2c" + name: "res3c_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "bn3c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "scale3c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b" + bottom: "res3c_branch2c" + top: "res3c" + name: "res3c" + type: "Eltwise" +} + +layer { + bottom: "res3c" + top: "res3c" + name: "res3c_relu" + type: "ReLU" +} + +layer { + bottom: "res3c" + top: "res3d_branch2a" + name: "res3d_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "bn3d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "scale3d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + 
lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "res3d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2b" + name: "res3d_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "bn3d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "scale3d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "res3d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2c" + name: "res3d_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "bn3d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "scale3d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c" + bottom: "res3d_branch2c" + top: "res3d" + name: "res3d" + type: "Eltwise" +} + +layer { + bottom: "res3d" + top: "res3d" + name: "res3d_relu" + type: "ReLU" +} + +layer { + bottom: "res3d" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + 
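# The layers above and below repeat the standard ResNet-50 bottleneck pattern used
# throughout this prototxt: a 1x1 reduce Convolution, a 3x3 Convolution, and a 1x1
# expand Convolution, each followed by a BatchNorm (use_global_stats: true) and a
# Scale layer, with an Eltwise shortcut sum and ReLU closing the block. All
# BatchNorm/Scale parameters are frozen (lr_mult: 0, decay_mult: 0), and only the
# Convolution weights from the conv3 stage onward are trained (lr_mult: 1.0), while
# conv1 and the conv2 blocks keep lr_mult: 0; freezing BN statistics this way is
# commonly done because detection fine-tuning uses batches too small for reliable
# batch statistics.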
+layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b_branch2a" + name: "res4b_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "bn4b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "scale4b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "res4b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2b" + name: "res4b_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "bn4b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "scale4b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "res4b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2c" + name: "res4b_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "bn4b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "scale4b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b_branch2c" + top: "res4b" + name: "res4b" + type: "Eltwise" +} + +layer { + bottom: "res4b" + top: "res4b" + name: "res4b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b" + top: "res4c_branch2a" + name: "res4c_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "bn4c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "scale4c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "res4c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2b" + name: "res4c_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "bn4c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "scale4c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "res4c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2c" + name: "res4c_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "bn4c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: 
"res4c_branch2c" + name: "scale4c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b" + bottom: "res4c_branch2c" + top: "res4c" + name: "res4c" + type: "Eltwise" +} + +layer { + bottom: "res4c" + top: "res4c" + name: "res4c_relu" + type: "ReLU" +} + +layer { + bottom: "res4c" + top: "res4d_branch2a" + name: "res4d_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "bn4d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "scale4d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "res4d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2b" + name: "res4d_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "bn4d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "scale4d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "res4d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2c" + name: "res4d_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "bn4d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "scale4d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c" + bottom: "res4d_branch2c" + top: "res4d" + name: "res4d" + type: "Eltwise" +} + +layer { + bottom: "res4d" + top: "res4d" + name: "res4d_relu" + type: "ReLU" +} + +layer { + bottom: "res4d" + top: "res4e_branch2a" + name: "res4e_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "bn4e_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "scale4e_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "res4e_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2b" + name: "res4e_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "bn4e_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "scale4e_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "res4e_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2c" + name: "res4e_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "bn4e_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "scale4e_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d" + bottom: "res4e_branch2c" + top: "res4e" + name: "res4e" + type: "Eltwise" +} + +layer { + bottom: "res4e" + top: "res4e" + name: "res4e_relu" + type: "ReLU" +} + +layer { + bottom: "res4e" + top: "res4f_branch2a" + name: "res4f_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "bn4f_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "scale4f_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "res4f_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2b" + name: "res4f_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "bn4f_branch2b" + type: 
"BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "scale4f_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "res4f_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2c" + name: "res4f_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "bn4f_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "scale4f_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e" + bottom: "res4f_branch2c" + top: "res4f" + name: "res4f" + type: "Eltwise" +} + +layer { + bottom: "res4f" + top: "res4f" + name: "res4f_relu" + type: "ReLU" +} + +layer { + bottom: "res4f" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + 
batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } 
+} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + + +#========= RPN ============ + +layer { + name: 
"rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 21" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + 
bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 4116 #4*21*(7^2) (4*cls_num)*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 84 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#--------------online hard example mining-------------- +layer { + name: "per_roi_loss_cls" + type: "SoftmaxWithLossOHEM" + bottom: "cls_score" + bottom: "labels" + top: "temp_loss_cls" + top: "temp_prob_cls" + top: "per_roi_loss_cls" + loss_weight: 0 + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + top: "temp_loss_bbox" + top: "per_roi_loss_bbox" + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss" + type: "Eltwise" + bottom: "per_roi_loss_cls" + bottom: "per_roi_loss_bbox" + top: "per_roi_loss" + propagate_down: false + propagate_down: false +} + +layer { + bottom: "rois" + bottom: "per_roi_loss" + bottom: "labels" + bottom: "bbox_inside_weights" + top: "labels_ohem" + top: "bbox_loss_weights_ohem" + name: "annotator_detector" + type: "BoxAnnotatorOHEM" + box_annotator_ohem_param { + roi_per_img: 128 + ignore_label: -1 + } + propagate_down: false + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "silence" + type: "Silence" + bottom: "bbox_outside_weights" + bottom: "temp_loss_cls" + bottom: "temp_prob_cls" + bottom: "temp_loss_bbox" +} + +#-----------------------output------------------------ +layer { + name: "loss" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels_ohem" + top: "loss_cls" + loss_weight: 1 + loss_param { + ignore_label: -1 + } + propagate_down: true + propagate_down: false +} + +layer { + name: "accuracy" + type: "Accuracy" + bottom: "cls_score" + bottom: "labels_ohem" + top:
"accuracy" + #include: { phase: TEST } + accuracy_param { + ignore_label: -1 + } + propagate_down: false + propagate_down: false +} + +layer { + name: "loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_loss_weights_ohem" + top: "loss_bbox" + loss_weight: 1 + loss_param { + normalization: PRE_FIXED + pre_fixed_normalizer: 128 + } + propagate_down: true + propagate_down: false + propagate_down: false +} + diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/solver.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/solver.prototxt new file mode 100644 index 0000000..873a4bc --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/solver.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 80000 +display: 20 + +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet50_rfcn" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/solver_ohem.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/solver_ohem.prototxt new file mode 100644 index 0000000..c8d196b --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/solver_ohem.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic_ohem.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 80000 +display: 20 + +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet50_rfcn_ohem" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/solver_warmup.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/solver_warmup.prototxt new file mode 100644 index 0000000..42274a9 --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/solver_warmup.prototxt @@ -0,0 +1,14 @@ +train_net: "models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic_ohem.prototxt" +lr_policy: "fixed" +base_lr: 0.0001 + +display: 20 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet50_rfcn_ohem" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/solver_warmup_continue.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/solver_warmup_continue.prototxt new file mode 100644 index 0000000..07255ca --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/solver_warmup_continue.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic_ohem.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 70000 + +display: 20 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet50_rfcn_ohem" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/test_agonistic.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/test_agonistic.prototxt new file mode 100644 index 0000000..898e8eb --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/test_agonistic.prototxt @@ -0,0 +1,3787
@@ +name: "ResNet50" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + } + param { + lr_mult: 0.0 + } + param { + lr_mult: 0.0 + } + +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: 
"res2a_branch2b" + top: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + 
bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b_branch2a" + name: "res3b_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "bn3b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + 
top: "res3b_branch2a" + name: "scale3b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "res3b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2b" + name: "res3b_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "bn3b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "scale3b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "res3b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2c" + name: "res3b_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "bn3b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "scale3b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b_branch2c" + top: "res3b" + name: "res3b" + type: "Eltwise" +} + +layer { + bottom: "res3b" + top: "res3b" + name: "res3b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b" + top: "res3c_branch2a" + name: "res3c_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "bn3c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "scale3c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "res3c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2b" + name: "res3c_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "bn3c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "scale3c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "res3c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2c" + name: "res3c_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "bn3c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "scale3c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b" + bottom: "res3c_branch2c" + top: "res3c" + name: "res3c" + type: "Eltwise" +} + +layer { + bottom: "res3c" + top: "res3c" + name: "res3c_relu" + type: "ReLU" +} + +layer { + bottom: "res3c" + top: "res3d_branch2a" + name: "res3d_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "bn3d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "scale3d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "res3d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2b" + name: "res3d_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "bn3d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "scale3d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "res3d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2c" + name: "res3d_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "bn3d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { 
+ lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "scale3d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c" + bottom: "res3d_branch2c" + top: "res3d" + name: "res3d" + type: "Eltwise" +} + +layer { + bottom: "res3d" + top: "res3d" + name: "res3d_relu" + type: "ReLU" +} + +layer { + bottom: "res3d" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b_branch2a" + name: "res4b_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "bn4b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "scale4b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "res4b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2b" + name: "res4b_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "bn4b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "scale4b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "res4b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2c" + name: "res4b_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "bn4b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "scale4b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b_branch2c" + top: "res4b" + name: "res4b" + type: "Eltwise" +} + +layer { + bottom: "res4b" + top: "res4b" + name: "res4b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b" + top: "res4c_branch2a" + name: "res4c_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2a" 
+ top: "res4c_branch2a" + name: "bn4c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "scale4c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "res4c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2b" + name: "res4c_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "bn4c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "scale4c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "res4c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2c" + name: "res4c_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "bn4c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "scale4c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b" + bottom: "res4c_branch2c" + top: "res4c" + name: "res4c" + type: "Eltwise" +} + +layer { + bottom: "res4c" + top: "res4c" + name: "res4c_relu" + type: "ReLU" +} + +layer { + bottom: "res4c" + top: "res4d_branch2a" + name: "res4d_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "bn4d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "scale4d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "res4d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2b" + name: "res4d_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + 
bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "bn4d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "scale4d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "res4d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2c" + name: "res4d_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "bn4d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "scale4d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c" + bottom: "res4d_branch2c" + top: "res4d" + name: "res4d" + type: "Eltwise" +} + +layer { + bottom: "res4d" + top: "res4d" + name: "res4d_relu" + type: "ReLU" +} + +layer { + bottom: "res4d" + top: "res4e_branch2a" + name: "res4e_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "bn4e_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "scale4e_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "res4e_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2b" + name: "res4e_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "bn4e_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "scale4e_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "res4e_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2c" + name: "res4e_branch2c" + type: 
"Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "bn4e_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "scale4e_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d" + bottom: "res4e_branch2c" + top: "res4e" + name: "res4e" + type: "Eltwise" +} + +layer { + bottom: "res4e" + top: "res4e" + name: "res4e_relu" + type: "ReLU" +} + +layer { + bottom: "res4e" + top: "res4f_branch2a" + name: "res4f_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "bn4f_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "scale4f_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "res4f_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2b" + name: "res4f_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "bn4f_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "scale4f_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "res4f_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2c" + name: "res4f_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "bn4f_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "scale4f_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e" + bottom: "res4f_branch2c" + top: "res4f" + name: "res4f" + type: "Eltwise" +} + +layer 
{ + bottom: "res4f" + top: "res4f" + name: "res4f_relu" + type: "ReLU" +} + +layer { + bottom: "res4f" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: 
"res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: 
"rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 392 #8*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 8 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred_pre" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#-----------------------output------------------------ +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob_pre" +} + +layer { + name: "cls_prob_reshape" + type: "Reshape" + bottom: "cls_prob_pre" + top: "cls_prob" + reshape_param { + shape { + dim: -1 + dim: 21 + } + } +} + +layer { + name: "bbox_pred_reshape" + type: "Reshape" + bottom: "bbox_pred_pre" + top: "bbox_pred" + reshape_param { + shape { + dim: -1 + dim: 8 + } + } +} + + diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic.prototxt new file mode 100644 index 0000000..13687cc --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic.prototxt @@ -0,0 +1,3877 @@ +name: "ResNet-50" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +# 
------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + } + param { + lr_mult: 0.0 + } + param { + lr_mult: 0.0 + } + +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { 
+ bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" 
+ bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { 
+ bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b_branch2a" + name: "res3b_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "bn3b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "scale3b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "res3b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2b" + name: "res3b_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "bn3b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "scale3b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "res3b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2c" + name: "res3b_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "bn3b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "scale3b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b_branch2c" + top: "res3b" + name: "res3b" + type: "Eltwise" +} + +layer { + bottom: "res3b" + top: "res3b" + name: "res3b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b" + top: "res3c_branch2a" + name: "res3c_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "bn3c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "scale3c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "res3c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2b" + name: "res3c_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "bn3c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: 
"res3c_branch2b" + name: "scale3c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "res3c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2c" + name: "res3c_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "bn3c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "scale3c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b" + bottom: "res3c_branch2c" + top: "res3c" + name: "res3c" + type: "Eltwise" +} + +layer { + bottom: "res3c" + top: "res3c" + name: "res3c_relu" + type: "ReLU" +} + +layer { + bottom: "res3c" + top: "res3d_branch2a" + name: "res3d_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "bn3d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "scale3d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "res3d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2b" + name: "res3d_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "bn3d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "scale3d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "res3d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2c" + name: "res3d_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "bn3d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { 
+ lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "scale3d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c" + bottom: "res3d_branch2c" + top: "res3d" + name: "res3d" + type: "Eltwise" +} + +layer { + bottom: "res3d" + top: "res3d" + name: "res3d_relu" + type: "ReLU" +} + +layer { + bottom: "res3d" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} 
+ +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b_branch2a" + name: "res4b_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "bn4b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "scale4b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "res4b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2b" + name: "res4b_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "bn4b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "scale4b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "res4b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2c" + name: "res4b_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "bn4b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "scale4b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b_branch2c" + top: "res4b" + name: "res4b" + type: "Eltwise" +} + +layer { + bottom: "res4b" + top: "res4b" + name: "res4b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b" + top: "res4c_branch2a" + name: "res4c_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "bn4c_branch2a" + type: "BatchNorm" + batch_norm_param { + 
use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "scale4c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "res4c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2b" + name: "res4c_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "bn4c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "scale4c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "res4c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2c" + name: "res4c_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "bn4c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "scale4c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b" + bottom: "res4c_branch2c" + top: "res4c" + name: "res4c" + type: "Eltwise" +} + +layer { + bottom: "res4c" + top: "res4c" + name: "res4c_relu" + type: "ReLU" +} + +layer { + bottom: "res4c" + top: "res4d_branch2a" + name: "res4d_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "bn4d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "scale4d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "res4d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2b" + name: "res4d_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2b" + 
top: "res4d_branch2b" + name: "bn4d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "scale4d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "res4d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2c" + name: "res4d_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "bn4d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "scale4d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c" + bottom: "res4d_branch2c" + top: "res4d" + name: "res4d" + type: "Eltwise" +} + +layer { + bottom: "res4d" + top: "res4d" + name: "res4d_relu" + type: "ReLU" +} + +layer { + bottom: "res4d" + top: "res4e_branch2a" + name: "res4e_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "bn4e_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "scale4e_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "res4e_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2b" + name: "res4e_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "bn4e_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "scale4e_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "res4e_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2c" + name: "res4e_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + 
bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "bn4e_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "scale4e_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d" + bottom: "res4e_branch2c" + top: "res4e" + name: "res4e" + type: "Eltwise" +} + +layer { + bottom: "res4e" + top: "res4e" + name: "res4e_relu" + type: "ReLU" +} + +layer { + bottom: "res4e" + top: "res4f_branch2a" + name: "res4f_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "bn4f_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "scale4f_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "res4f_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2b" + name: "res4f_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "bn4f_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "scale4f_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "res4f_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2c" + name: "res4f_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "bn4f_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "scale4f_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e" + bottom: "res4f_branch2c" + top: "res4f" + name: "res4f" + type: "Eltwise" +} + +layer { + bottom: "res4f" + top: "res4f" + name: "res4f_relu" + type: "ReLU" +} + +layer { + bottom: 
"res4f" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: 
"res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: 
"res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + 
layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 2" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 392 #8*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + 
spatial_scale: 0.0625 + output_dim: 8 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#-----------------------output------------------------ +layer { + name: "loss" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + top: "loss_cls" + loss_weight: 1 + propagate_down: true + propagate_down: false +} + +layer { + name: "accuarcy" + type: "Accuracy" + bottom: "cls_score" + bottom: "labels" + top: "accuarcy" + #include: { phase: TEST } + propagate_down: false + propagate_down: false +} + +layer { + name: "loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: 'bbox_inside_weights' + top: "loss_bbox" + loss_weight: 1 + loss_param { + normalization: PRE_FIXED + pre_fixed_normalizer: 128 + } + propagate_down: true + propagate_down: false + propagate_down: false +} + +layer { + name: "silence" + type: "Silence" + bottom: "bbox_outside_weights" +} + diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic_ohem.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic_ohem.prototxt new file mode 100644 index 0000000..7981ac8 --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic_ohem.prototxt @@ -0,0 +1,3946 @@ +name: "ResNet-50" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +# ------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + } + param { + lr_mult: 0.0 + } + param { + lr_mult: 0.0 + } + +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + 
stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: 
"res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "res2c_branch2b_relu" + 
type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: 
"res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b_branch2a" + name: "res3b_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "bn3b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "scale3b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "res3b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2b" + name: "res3b_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "bn3b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "scale3b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "res3b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2c" + name: "res3b_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "bn3b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "scale3b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + 
bottom: "res3b_branch2c" + top: "res3b" + name: "res3b" + type: "Eltwise" +} + +layer { + bottom: "res3b" + top: "res3b" + name: "res3b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b" + top: "res3c_branch2a" + name: "res3c_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "bn3c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "scale3c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "res3c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2b" + name: "res3c_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "bn3c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "scale3c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "res3c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2c" + name: "res3c_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "bn3c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "scale3c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b" + bottom: "res3c_branch2c" + top: "res3c" + name: "res3c" + type: "Eltwise" +} + +layer { + bottom: "res3c" + top: "res3c" + name: "res3c_relu" + type: "ReLU" +} + +layer { + bottom: "res3c" + top: "res3d_branch2a" + name: "res3d_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "bn3d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "scale3d_branch2a" + type: "Scale" + 
scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "res3d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2b" + name: "res3d_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "bn3d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "scale3d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "res3d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2c" + name: "res3d_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "bn3d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "scale3d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c" + bottom: "res3d_branch2c" + top: "res3d" + name: "res3d" + type: "Eltwise" +} + +layer { + bottom: "res3d" + top: "res3d" + name: "res3d_relu" + type: "ReLU" +} + +layer { + bottom: "res3d" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b_branch2a" + name: "res4b_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "bn4b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "scale4b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "res4b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2b" + name: "res4b_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "bn4b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: 
"res4b_branch2b" + name: "scale4b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "res4b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2c" + name: "res4b_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "bn4b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "scale4b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b_branch2c" + top: "res4b" + name: "res4b" + type: "Eltwise" +} + +layer { + bottom: "res4b" + top: "res4b" + name: "res4b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b" + top: "res4c_branch2a" + name: "res4c_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "bn4c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "scale4c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "res4c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2b" + name: "res4c_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "bn4c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "scale4c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "res4c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2c" + name: "res4c_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "bn4c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param 
{ + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "scale4c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b" + bottom: "res4c_branch2c" + top: "res4c" + name: "res4c" + type: "Eltwise" +} + +layer { + bottom: "res4c" + top: "res4c" + name: "res4c_relu" + type: "ReLU" +} + +layer { + bottom: "res4c" + top: "res4d_branch2a" + name: "res4d_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "bn4d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "scale4d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "res4d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2b" + name: "res4d_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "bn4d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "scale4d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "res4d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2c" + name: "res4d_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "bn4d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "scale4d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c" + bottom: "res4d_branch2c" + top: "res4d" + name: "res4d" + type: "Eltwise" +} + +layer { + bottom: "res4d" + top: "res4d" + name: "res4d_relu" + type: "ReLU" +} + +layer { + bottom: "res4d" + top: "res4e_branch2a" + name: "res4e_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "bn4e_branch2a" + type: 
"BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "scale4e_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "res4e_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2b" + name: "res4e_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "bn4e_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "scale4e_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "res4e_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2c" + name: "res4e_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "bn4e_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "scale4e_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d" + bottom: "res4e_branch2c" + top: "res4e" + name: "res4e" + type: "Eltwise" +} + +layer { + bottom: "res4e" + top: "res4e" + name: "res4e_relu" + type: "ReLU" +} + +layer { + bottom: "res4e" + top: "res4f_branch2a" + name: "res4f_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "bn4f_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "scale4f_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "res4f_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2b" + name: "res4f_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + 
+layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "bn4f_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "scale4f_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "res4f_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2c" + name: "res4f_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "bn4f_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "scale4f_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e" + bottom: "res4f_branch2c" + top: "res4f" + name: "res4f" + type: "Eltwise" +} + +layer { + bottom: "res4f" + top: "res4f" + name: "res4f_relu" + type: "ReLU" +} + +layer { + bottom: "res4f" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: 
"res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + 
kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: 
"res5c_relu" + type: "ReLU" +} + + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 2" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + 
kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 392 #8*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 8 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#--------------online hard example mining-------------- +layer { + name: "per_roi_loss_cls" + type: "SoftmaxWithLossOHEM" + bottom: "cls_score" + bottom: "labels" + top: "temp_loss_cls" + top: "temp_prob_cls" + top: "per_roi_loss_cls" + loss_weight: 0 + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + top: "temp_loss_bbox" + top: "per_roi_loss_bbox" + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss" + type: "Eltwise" + bottom: "per_roi_loss_cls" + bottom: "per_roi_loss_bbox" + top: "per_roi_loss" + propagate_down: false + propagate_down: false +} + +layer { + bottom: "rois" + bottom: "per_roi_loss" + bottom: "labels" + bottom: "bbox_inside_weights" + top: "labels_ohem" + top: "bbox_loss_weights_ohem" + name: "annotator_detector" + type: "BoxAnnotatorOHEM" + box_annotator_ohem_param { + roi_per_img: 128 + ignore_label: -1 + } + propagate_down: false + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "silence" + type: "Silence" + bottom: "bbox_outside_weights" + bottom: "temp_loss_cls" + bottom: "temp_prob_cls" + bottom: "temp_loss_bbox" +} + +#-----------------------output------------------------ +layer { + name: "loss" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels_ohem" + top: "loss_cls" + loss_weight: 1 + loss_param { + ignore_label: -1 + } + propagate_down: true + propagate_down: false +} + +layer { + name: 
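(Editor's note) On the position-sensitive heads and the OHEM block above: rfcn_cls outputs 21 x 7^2 = 1029 score maps (20 VOC classes plus background, 7x7 bins) and rfcn_bbox outputs 4 x 2 x 7^2 = 392 maps (class-agnostic regression over bg/fg), which PSROIPooling pools at spatial_scale 1/16 and the AVE Pooling layers then average ("vote") over the 7x7 grid. The per-RoI losses are emitted with loss_weight: 0 only so BoxAnnotatorOHEM can rank RoIs by total loss; it keeps the roi_per_img: 128 hardest RoIs, marks the rest with ignore_label: -1, and the final SmoothL1LossOHEM is normalized by pre_fixed_normalizer: 128 to match. A rough numpy sketch of that selection step (select_hard_rois is a made-up name, and the real layer carries the per-coordinate bbox_inside_weights through for the kept RoIs rather than a single scalar weight):

```python
import numpy as np

# Channel bookkeeping for the position-sensitive heads above (sketch only).
num_classes, group = 21, 7                   # 20 VOC classes + background, 7x7 bins
assert num_classes * group * group == 1029   # rfcn_cls num_output
agnostic_bbox_cls = 2                        # class-agnostic regression: bg/fg
assert 4 * agnostic_bbox_cls * group * group == 392  # rfcn_bbox num_output

def select_hard_rois(per_roi_loss, labels, roi_per_img=128, ignore_label=-1):
    """Roughly what BoxAnnotatorOHEM does per image: keep the hardest RoIs,
    ignore the rest so the downstream losses only see the selected examples."""
    order = np.argsort(-per_roi_loss)        # hardest (largest loss) first
    keep = order[:roi_per_img]
    labels_ohem = np.full_like(labels, ignore_label)
    labels_ohem[keep] = labels[keep]
    bbox_loss_weights = np.zeros_like(per_roi_loss)  # simplified: scalar per RoI
    bbox_loss_weights[keep] = 1.0
    return labels_ohem, bbox_loss_weights

# Toy usage with 300 proposals per image.
per_roi_loss = np.random.rand(300)
labels = np.random.randint(0, num_classes, size=300)
labels_ohem, weights = select_hard_rois(per_roi_loss, labels)
print((labels_ohem != -1).sum(), "RoIs kept for the loss")  # -> 128
```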
"accuarcy" + type: "Accuracy" + bottom: "cls_score" + bottom: "labels_ohem" + top: "accuarcy" + #include: { phase: TEST } + accuracy_param { + ignore_label: -1 + } + propagate_down: false + propagate_down: false +} + +layer { + name: "loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_loss_weights_ohem" + top: "loss_bbox" + loss_weight: 1 + loss_param { + normalization: PRE_FIXED + pre_fixed_normalizer: 128 + } + propagate_down: true + propagate_down: false + propagate_down: false +} + diff --git a/models/pascal_voc/VGG16/fast_rcnn/solver.prototxt b/models/pascal_voc/VGG16/fast_rcnn/solver.prototxt new file mode 100644 index 0000000..9449ab1 --- /dev/null +++ b/models/pascal_voc/VGG16/fast_rcnn/solver.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG16/fast_rcnn/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +# iter_size: 1 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_fast_rcnn" +#debug_info: true diff --git a/models/pascal_voc/VGG16/fast_rcnn/test.prototxt b/models/pascal_voc/VGG16/fast_rcnn/test.prototxt new file mode 100644 index 0000000..fec2369 --- /dev/null +++ b/models/pascal_voc/VGG16/fast_rcnn/test.prototxt @@ -0,0 +1,517 @@ +name: "VGG_ILSVRC_16_layers" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "rois" +input_shape { + dim: 1 # to be changed on-the-fly to num ROIs + dim: 5 # [batch ind, x1, y1, x2, y2] zero-based indexing +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + 
type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { 
+ name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/pascal_voc/VGG16/fast_rcnn/train.prototxt b/models/pascal_voc/VGG16/fast_rcnn/train.prototxt new file mode 100644 index 0000000..2e7958f --- /dev/null +++ b/models/pascal_voc/VGG16/fast_rcnn/train.prototxt @@ -0,0 +1,503 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: 
"conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + 
dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/faster_rcnn_test.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/faster_rcnn_test.pt new file mode 100644 index 0000000..c264bfd --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/faster_rcnn_test.pt @@ -0,0 +1,409 @@ +name: "VGG_ILSVRC_16_layers" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + convolution_param { + num_output: 64 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + convolution_param { + num_output: 64 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + convolution_param { + num_output: 128 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + convolution_param { + num_output: 128 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + convolution_param { + num_output: 256 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + convolution_param { + num_output: 256 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + convolution_param { + num_output: 256 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv4_1" + type: 
"Convolution" + bottom: "pool3" + top: "conv4_1" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + inner_product_param { + num_output: 4096 + } +} +layer 
{ + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + inner_product_param { + num_output: 21 + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + inner_product_param { + num_output: 84 + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/rpn_test.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/rpn_test.pt new file mode 100644 index 0000000..0e7b1da --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/rpn_test.pt @@ -0,0 +1,341 @@ +name: "VGG_ILSVRC_16_layers" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + convolution_param { + num_output: 64 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + convolution_param { + num_output: 64 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + convolution_param { + num_output: 128 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + convolution_param { + num_output: 128 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + convolution_param { + num_output: 256 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + convolution_param { + num_output: 256 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + convolution_param { + num_output: 256 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + convolution_param { + num_output: 512 + pad: 1 
kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + top: 'scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt new file mode 100644 index 0000000..82df70a --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_fast_rcnn" diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt new file mode 100644 index 0000000..628dc85 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt @@ -0,0 +1,542 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'data' + type: 'Python' + 
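+  # (stage-1 Fast R-CNN of the alternating-optimization scheme: this net is
+  #  trained on proposals generated by the stage-1 RPN, which the
+  #  RoIDataLayer below serves together with their labels and regression
+  #  targets)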
top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { 
+ num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} + +#========= RPN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + 
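+# (as the comment above notes, these RPN layers are dummies in this stage:
+#  lr_mult and decay_mult are 0 and their outputs feed Silence layers, so
+#  they exist only to carry the RPN weights into this stage's snapshot; the
+#  channel counts follow the usual anchor arithmetic, 18 = 2 (bg/fg) * 9
+#  anchors for scores and 36 = 4 deltas * 9 anchors for box regression)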
+layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "silence_rpn_cls_score" + type: "Silence" + bottom: "rpn_cls_score" +} +layer { + name: "silence_rpn_bbox_pred" + type: "Silence" + bottom: "rpn_bbox_pred" +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt new file mode 100644 index 0000000..a6e29f0 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 60000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_rpn" diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_train.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_train.pt new file mode 100644 index 0000000..e5f3c50 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_train.pt @@ -0,0 +1,525 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + 
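+  # (as in the other training nets here, conv1_* and conv2_* stay frozen with
+  #  lr_mult/decay_mult 0, and only conv3_1 onward is fine-tuned together
+  #  with the RPN head)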
bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: 
"rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RCNN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "dummy_roi_pool_conv5" + type: "DummyData" + top: "dummy_roi_pool_conv5" + dummy_data_param { + shape { dim: 1 dim: 25088 } + data_filler { type: "constant" value: 0 } + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "dummy_roi_pool_conv5" + top: "fc6" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "silence_fc7" + type: "Silence" + bottom: "fc7" +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt new file mode 100644 index 0000000..861536c --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_fast_rcnn" diff --git 
a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt new file mode 100644 index 0000000..e38bb2b --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt @@ -0,0 +1,490 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer 
{ + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} + +#========= RPN 
============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "silence_rpn_cls_score" + type: "Silence" + bottom: "rpn_cls_score" +} +layer { + name: "silence_rpn_bbox_pred" + type: "Silence" + bottom: "rpn_bbox_pred" +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt new file mode 100644 index 0000000..7199df8 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 60000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_rpn" diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_train.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_train.pt new file mode 100644 index 0000000..6d82857 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_train.pt @@ -0,0 +1,465 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { lr_mult: 0 decay_mult: 0 } 
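+  # (stage 2 re-trains the RPN with every shared conv layer frozen, lr_mult
+  #  and decay_mult 0 from conv1_1 through conv5_3, so only the rpn_* layers
+  #  further down are updated and the RPN shares the detector's fixed
+  #  convolutional features)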
+ param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { 
lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RCNN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "dummy_roi_pool_conv5" + type: "DummyData" + top: "dummy_roi_pool_conv5" + dummy_data_param { + shape { dim: 1 dim: 25088 } + data_filler { type: "constant" value: 0 } + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "dummy_roi_pool_conv5" + top: "fc6" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "silence_fc7" + type: "Silence" + bottom: "fc7" +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_end2end/solver.prototxt b/models/pascal_voc/VGG16/faster_rcnn_end2end/solver.prototxt new file mode 100644 index 
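Editorial aside (not part of the diff): the "dummy layers" pattern above — frozen (lr_mult: 0) fc6/fc7 fed by DummyData and terminated by Silence, mirroring the Silence'd RPN heads in the fast R-CNN stage nets — exists only so that those parameters are present in the snapshot handed to the next alternating-optimization stage. A minimal pycaffe sketch of how one might confirm the saved net really carries them; the weights filename is hypothetical and assumes pycaffe is on the path:

# Sketch only: check that the "dummy" blobs survive into a stage-2 RPN snapshot.
import caffe

proto = 'models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_train.pt'
weights = 'vgg16_rpn_stage2_iter_80000.caffemodel'  # hypothetical snapshot name

net = caffe.Net(proto, weights, caffe.TEST)
for name in ('rpn_conv/3x3', 'rpn_cls_score', 'rpn_bbox_pred', 'fc6', 'fc7'):
    w = net.params[name][0].data
    print(name, w.shape)  # e.g. fc6 -> (4096, 25088), matching the DummyData input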
0000000..7547cc8 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_end2end/solver.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG16/faster_rcnn_end2end/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 50000 +display: 20 +average_loss: 100 +# iter_size: 1 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_faster_rcnn" +iter_size: 2 diff --git a/models/pascal_voc/VGG16/faster_rcnn_end2end/test.prototxt b/models/pascal_voc/VGG16/faster_rcnn_end2end/test.prototxt new file mode 100644 index 0000000..4a93820 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_end2end/test.prototxt @@ -0,0 +1,608 @@ +name: "VGG_ILSVRC_16_layers" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + 
name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + 
type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_end2end/train.prototxt b/models/pascal_voc/VGG16/faster_rcnn_end2end/train.prototxt new file mode 100644 index 0000000..ebadb49 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_end2end/train.prototxt @@ -0,0 +1,673 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + 
bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: 
"relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 
'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 21" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/solver.prototxt b/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/solver.prototxt new file mode 100644 index 0000000..eaa94d9 --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/solver.prototxt @@ -0,0 +1,15 @@ +train_net: "models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_fast_rcnn" +#debug_info: true diff --git a/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/test.prototxt b/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/test.prototxt new file mode 100644 index 0000000..baeac36 --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/test.prototxt @@ -0,0 +1,317 @@ +name: "VGG_CNN_M_1024" +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} +input: "rois" +input_shape { + dim: 1 # to be changed on-the-fly to num ROIs + dim: 5 # [batch ind, x1, y1, x2, y2] zero-based indexing +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + 
bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + 
type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/train.prototxt b/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/train.prototxt new file mode 100644 index 0000000..d702367 --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/train.prototxt @@ -0,0 +1,310 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param 
{ + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/faster_rcnn_test.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/faster_rcnn_test.pt new file mode 100644 index 0000000..954b276 --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/faster_rcnn_test.pt @@ -0,0 +1,289 @@ +name: "VGG_CNN_M_1024" +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + 
convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + inner_product_param { + num_output: 21 + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + inner_product_param { + num_output: 84 + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/rpn_test.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/rpn_test.pt new file mode 100644 index 0000000..fdf373f --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/rpn_test.pt @@ -0,0 +1,221 @@ +name: "VGG_CNN_M_1024" +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: 
"Convolution" + bottom: "pool2" + top: "conv3" + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + top: 'scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt new file mode 100644 index 0000000..8444a3e --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_fast_rcnn" diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt new file mode 100644 index 0000000..1bc1534 --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt @@ -0,0 +1,337 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + 
name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 256 + pad: 1 kernel_size: 5 stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" 
+ bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} + +#========= RPN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "silence_rpn_cls_score" + type: "Silence" + bottom: "rpn_cls_score" +} +layer { + name: "silence_rpn_bbox_pred" + type: "Silence" + bottom: "rpn_bbox_pred" +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt new file mode 100644 index 0000000..6bea5fc --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 60000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_rpn" diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_train.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_train.pt new file mode 100644 index 0000000..29fdfe0 --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_train.pt @@ -0,0 +1,286 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + 
top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 256 + pad: 1 kernel_size: 5 stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 
'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RCNN ============ + +layer { + name: "dummy_roi_pool_conv5" + type: "DummyData" + top: "dummy_roi_pool_conv5" + dummy_data_param { + shape { dim: 1 dim: 18432 } + data_filler { type: "gaussian" std: 0.01 } + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "dummy_roi_pool_conv5" + top: "fc6" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "silence_fc7" + type: "Silence" + bottom: "fc7" +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt new file mode 100644 index 0000000..a45a6ee --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_fast_rcnn" diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt new file mode 100644 index 0000000..4825b1b --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt @@ -0,0 +1,337 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 kernel_size: 5 stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 
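Editorial aside (not part of the diff): the RPN bbox loss above is the repo's SmoothL1Loss with sigma: 3.0, where (in the usual sigma-parameterized definition) the quadratic-to-linear transition sits at |x| = 1/sigma^2, and the inside/outside weight blobs from AnchorTargetLayer respectively mask the per-coordinate differences and normalize the summed loss. A numpy sketch of the per-element term, assuming that standard definition rather than quoting the layer's code:

# Per-element smooth L1 with a sigma parameter, as assumed for the RPN bbox loss.
import numpy as np

def smooth_l1(x, sigma=3.0):
    s2 = sigma ** 2
    x = np.abs(x)
    return np.where(x < 1.0 / s2, 0.5 * s2 * x ** 2, x - 0.5 / s2)

diff = np.array([0.05, 0.2, 1.5])     # (pred - target) per box coordinate
inside_w = np.array([1.0, 1.0, 0.0])  # zero drops a coordinate from the loss
print(smooth_l1(diff * inside_w))     # [0.01125, 0.1444..., 0.0]; outside weights
                                      # would then scale these terms before summing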
decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} + +#========= RPN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 
} + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "silence_rpn_cls_score" + type: "Silence" + bottom: "rpn_cls_score" +} +layer { + name: "silence_rpn_bbox_pred" + type: "Silence" + bottom: "rpn_bbox_pred" +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt new file mode 100644 index 0000000..85f4f0c --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 60000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_rpn" diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_train.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_train.pt new file mode 100644 index 0000000..d27e76b --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_train.pt @@ -0,0 +1,286 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 kernel_size: 5 stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} 
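
For readers tuning these alt-opt solvers: the stage-2 RPN solver above uses Caffe's "step" learning-rate policy (base_lr 0.001, gamma 0.1, stepsize 60000), and the shared conv layers in the stage-2 nets are frozen by giving their parameter blobs lr_mult: 0 and decay_mult: 0. The following is a minimal, self-contained Python sketch, not part of the repository and with helper names invented for illustration, of how those two settings combine into the rate a given parameter blob actually receives.

# Hypothetical helpers (illustration only): Caffe's "step" policy is
# lr = base_lr * gamma ** floor(iter / stepsize), and each parameter blob
# scales that by its own lr_mult. lr_mult: 0 (with decay_mult: 0), as in the
# frozen conv layers of these stage-2 nets, means the blob is never updated.

def step_lr(base_lr, gamma, stepsize, it):
    """Global learning rate under the 'step' policy at iteration `it`."""
    return base_lr * gamma ** (it // stepsize)

def blob_lr(base_lr, gamma, stepsize, it, lr_mult):
    """Effective learning rate for one parameter blob."""
    return step_lr(base_lr, gamma, stepsize, it) * lr_mult

if __name__ == "__main__":
    base_lr, gamma, stepsize = 0.001, 0.1, 60000  # values from the solver above
    for it in (0, 59999, 60000, 80000):
        print(it, step_lr(base_lr, gamma, stepsize, it))  # 0.001, 0.001, 0.0001, 0.0001
    # Frozen conv weights (lr_mult: 0) vs. trainable RPN weights (lr_mult: 1):
    print(blob_lr(base_lr, gamma, stepsize, 0, lr_mult=0))  # 0.0 -> no update
    print(blob_lr(base_lr, gamma, stepsize, 0, lr_mult=1))  # 0.001
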
+layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RCNN ============ + +layer { + name: "dummy_roi_pool_conv5" + type: "DummyData" + top: "dummy_roi_pool_conv5" + dummy_data_param { + shape { dim: 1 dim: 18432 } + data_filler { type: "gaussian" std: 0.01 } + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "dummy_roi_pool_conv5" + top: "fc6" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "silence_fc7" + type: "Silence" + bottom: "fc7" +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/solver.prototxt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/solver.prototxt new file mode 100644 index 0000000..8134d4e --- /dev/null +++ 
b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/solver.prototxt @@ -0,0 +1,14 @@ +train_net: "models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 50000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_faster_rcnn" diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/test.prototxt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/test.prototxt new file mode 100644 index 0000000..c8bc90a --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/test.prototxt @@ -0,0 +1,450 @@ +name: "VGG_CNN_M_1024" +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: 
"rpn/output" + top: "rpn/output" +} + +#layer { +# name: "rpn_conv/3x3" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/3x3" +# param { lr_mult: 1.0 decay_mult: 1.0 } +# param { lr_mult: 2.0 decay_mult: 0 } +# convolution_param { +# num_output: 192 +# kernel_size: 3 pad: 1 stride: 1 +# weight_filler { type: "gaussian" std: 0.01 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn_conv/5x5" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/5x5" +# param { lr_mult: 1.0 decay_mult: 1.0 } +# param { lr_mult: 2.0 decay_mult: 0 } +# convolution_param { +# num_output: 64 +# kernel_size: 5 pad: 2 stride: 1 +# weight_filler { type: "gaussian" std: 0.0036 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn/output" +# type: "Concat" +# bottom: "rpn_conv/3x3" +# bottom: "rpn_conv/5x5" +# top: "rpn/output" +#} +#layer { +# name: "rpn_relu/output" +# type: "ReLU" +# bottom: "rpn/output" +# top: "rpn/output" +#} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: 
"InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt new file mode 100644 index 0000000..81a4d3e --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt @@ -0,0 +1,484 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" 
std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 21" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: 
"Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/pascal_voc/ZF/fast_rcnn/solver.prototxt b/models/pascal_voc/ZF/fast_rcnn/solver.prototxt new file mode 100644 index 0000000..3b346cc --- /dev/null +++ b/models/pascal_voc/ZF/fast_rcnn/solver.prototxt @@ -0,0 +1,18 @@ +train_net: "models/pascal_voc/ZF/fast_rcnn/train.prototxt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "zf_fast_rcnn" +#debug_info: true +#iter_size: 2 diff --git a/models/pascal_voc/ZF/fast_rcnn/test.prototxt b/models/pascal_voc/ZF/fast_rcnn/test.prototxt new file mode 100644 index 0000000..18d5cd8 --- /dev/null +++ b/models/pascal_voc/ZF/fast_rcnn/test.prototxt @@ -0,0 +1,251 @@ +name: "ZF" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "rois" +input_shape { + dim: 1 # to be changed on-the-fly to num ROIs + dim: 5 # [batch ind, x1, y1, x2, y2] zero-based indexing +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: 
CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RCNN ============ + +layer { + name: "roi_pool_conv5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "roi_pool_conv5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "roi_pool_conv5" + top: "fc6" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + inner_product_param { + num_output: 21 + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + inner_product_param { + num_output: 84 + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" + loss_param { + ignore_label: -1 + normalize: true + } +} diff --git a/models/pascal_voc/ZF/fast_rcnn/train.prototxt b/models/pascal_voc/ZF/fast_rcnn/train.prototxt new file mode 100644 index 0000000..921d8e3 --- /dev/null +++ b/models/pascal_voc/ZF/fast_rcnn/train.prototxt @@ -0,0 +1,300 @@ +name: "ZF" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + 
convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RCNN ============ + +layer { + name: "roi_pool_conv5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "roi_pool_conv5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "roi_pool_conv5" + top: "fc6" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "bbox_loss" + loss_weight: 1 +} diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/faster_rcnn_test.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/faster_rcnn_test.pt new file mode 100644 
index 0000000..b24aae4 --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/faster_rcnn_test.pt @@ -0,0 +1,327 @@ +name: "ZF" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + + +layer { + name: "rpn_conv1" + type: "Convolution" + bottom: "conv5" + top: "rpn_conv1" + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + } +} +layer { + name: "rpn_relu1" + type: "ReLU" + bottom: "rpn_conv1" + top: "rpn_conv1" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_cls_score" + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_bbox_pred" + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 
'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool_conv5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "roi_pool_conv5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "roi_pool_conv5" + top: "fc6" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + inner_product_param { + num_output: 21 + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + inner_product_param { + num_output: 84 + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" + loss_param { + ignore_label: -1 + normalize: true + } +} diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/rpn_test.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/rpn_test.pt new file mode 100644 index 0000000..204f08f --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/rpn_test.pt @@ -0,0 +1,233 @@ +name: "ZF" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +# ------------------------ layer 1 ----------------------------- +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} + +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + 
type: "Convolution" + bottom: "conv4" + top: "conv5" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#-----------------------layer +------------------------- + +layer { + name: "rpn_conv1" + type: "Convolution" + bottom: "conv5" + top: "rpn_conv1" + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + } +} +layer { + name: "rpn_relu1" + type: "ReLU" + bottom: "rpn_conv1" + top: "rpn_conv1" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_cls_score" + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_bbox_pred" + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#-----------------------output------------------------ +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + top: 'scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt new file mode 100644 index 0000000..0180e7c --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "zf_fast_rcnn" diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt new file mode 100644 index 0000000..3d98184 --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt @@ -0,0 +1,362 @@ +name: "ZF" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { 
+ name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RCNN ============ + +layer { + name: "roi_pool_conv5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "roi_pool_conv5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "roi_pool_conv5" + top: "fc6" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: 
"bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "bbox_loss" + loss_weight: 1 +} + +#========= RPN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "rpn_conv1" + type: "Convolution" + bottom: "conv5" + top: "rpn_conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu1" + type: "ReLU" + bottom: "rpn_conv1" + top: "rpn_conv1" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_cls_score" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_bbox_pred" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "silence_rpn_cls_score" + type: "Silence" + bottom: "rpn_cls_score" +} +layer { + name: "silence_rpn_bbox_pred" + type: "Silence" + bottom: "rpn_bbox_pred" +} diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt new file mode 100644 index 0000000..23a7c6a --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 60000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "zf_rpn" diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_train.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_train.pt new file mode 100644 index 0000000..adf8605 --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_train.pt @@ -0,0 +1,312 @@ +name: "ZF" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + 
convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv1" + type: "Convolution" + bottom: "conv5" + top: "rpn_conv1" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu1" + type: "ReLU" + bottom: "rpn_conv1" + top: "rpn_conv1" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: "rpn_bbox_inside_weights" + bottom: "rpn_bbox_outside_weights" + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + 
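
These prototxts hard-code several derived sizes: 18 and 36 output channels for the RPN classification and regression heads (2 scores and 4 box deltas per anchor, 9 anchors), spatial_scale 0.0625 for ROIPooling (1/feat_stride with feat_stride 16), and dummy fc6 input blobs of 9216 (ZF, 256-channel conv5) or 18432 (VGG_CNN_M_1024, 512-channel conv5) from a 6x6 RoI pool. Below is a small Python sketch, hypothetical and not part of the repo, that recomputes these numbers; it may help when adapting the nets to a different anchor set or backbone.

# Hypothetical sanity-check script (illustration only): reproduces the
# hard-coded sizes in these prototxts from their underlying quantities.

num_anchors  = 9                    # 3 scales x 3 aspect ratios
num_rpn_cls  = 2 * num_anchors      # 18 -> rpn_cls_score num_output (bg/fg per anchor)
num_rpn_bbox = 4 * num_anchors      # 36 -> rpn_bbox_pred num_output (dx,dy,dw,dh per anchor)

feat_stride   = 16                  # total conv stride; see "'feat_stride': 16" above
spatial_scale = 1.0 / feat_stride   # 0.0625 used by the ROIPooling layers

pooled_w = pooled_h = 6
conv5_channels = {"ZF": 256, "VGG_CNN_M_1024": 512}
# fc6 input size for the dummy RoI-pool blob in the *_rpn_train.pt files:
dummy_dims = {name: c * pooled_w * pooled_h for name, c in conv5_channels.items()}
# -> {'ZF': 9216, 'VGG_CNN_M_1024': 18432}

num_classes = 21                    # 20 VOC classes + background
assert num_rpn_cls == 18 and num_rpn_bbox == 36
assert abs(spatial_scale - 0.0625) < 1e-12
assert 4 * num_classes == 84        # bbox_pred num_output in the Fast R-CNN heads
print(dummy_dims)
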
+#========= RCNN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "dummy_roi_pool_conv5" + type: "DummyData" + top: "dummy_roi_pool_conv5" + dummy_data_param { + shape { dim: 1 dim: 9216 } + data_filler { type: "gaussian" std: 0.01 } + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "dummy_roi_pool_conv5" + top: "fc6" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "silence_fc7" + type: "Silence" + bottom: "fc7" +} diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt new file mode 100644 index 0000000..a666def --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "zf_fast_rcnn" diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt new file mode 100644 index 0000000..262ed65 --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt @@ -0,0 +1,362 @@ +name: "ZF" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + 
bottom: "pool2" + top: "conv3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RCNN ============ + +layer { + name: "roi_pool_conv5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "roi_pool_conv5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "roi_pool_conv5" + top: "fc6" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "bbox_loss" + loss_weight: 1 +} + +#========= RPN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "rpn_conv1" + type: "Convolution" + bottom: "conv5" + top: "rpn_conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu1" + type: "ReLU" + bottom: "rpn_conv1" + top: "rpn_conv1" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_cls_score" + 
param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_bbox_pred" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "silence_rpn_cls_score" + type: "Silence" + bottom: "rpn_cls_score" +} +layer { + name: "silence_rpn_bbox_pred" + type: "Silence" + bottom: "rpn_bbox_pred" +} diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt new file mode 100644 index 0000000..15d3da7 --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 60000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "zf_rpn" diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_train.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_train.pt new file mode 100644 index 0000000..336b05b --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_train.pt @@ -0,0 +1,312 @@ +name: "ZF" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 
1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv1" + type: "Convolution" + bottom: "conv5" + top: "rpn_conv1" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu1" + type: "ReLU" + bottom: "rpn_conv1" + top: "rpn_conv1" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: "rpn_bbox_inside_weights" + bottom: "rpn_bbox_outside_weights" + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RCNN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "dummy_roi_pool_conv5" + type: "DummyData" + top: "dummy_roi_pool_conv5" + dummy_data_param { + shape { dim: 1 dim: 9216 } + data_filler { type: "gaussian" std: 0.01 } + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "dummy_roi_pool_conv5" + top: "fc6" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + 
top: "fc7" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "silence_fc7" + type: "Silence" + bottom: "fc7" +} diff --git a/models/pascal_voc/ZF/faster_rcnn_end2end/solver.prototxt b/models/pascal_voc/ZF/faster_rcnn_end2end/solver.prototxt new file mode 100644 index 0000000..246697a --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_end2end/solver.prototxt @@ -0,0 +1,25 @@ +train_net: "models/pascal_voc/ZF/faster_rcnn_end2end/train.prototxt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 50000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +#base_lr: 0.001 +#lr_policy: "exp" +#gamma: 0.999539589 # (0.00001/0.001)^(1/10000) +#display: 1 +#average_loss: 100 +#momentum: 0.9 +#weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "zf_faster_rcnn" +iter_size: 2 diff --git a/models/pascal_voc/ZF/faster_rcnn_end2end/test.prototxt b/models/pascal_voc/ZF/faster_rcnn_end2end/test.prototxt new file mode 100644 index 0000000..6d88dc3 --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_end2end/test.prototxt @@ -0,0 +1,373 @@ +name: "ZF" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 
+ weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +#layer { +# name: "rpn_conv/3x3" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/3x3" +# param { lr_mult: 1.0 decay_mult: 1.0 } +# param { lr_mult: 2.0 decay_mult: 0 } +# convolution_param { +# num_output: 192 +# kernel_size: 3 pad: 1 stride: 1 +# weight_filler { type: "gaussian" std: 0.01 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn_conv/5x5" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/5x5" +# param { lr_mult: 1.0 decay_mult: 1.0 } +# param { lr_mult: 2.0 decay_mult: 0 } +# convolution_param { +# num_output: 64 +# kernel_size: 5 pad: 2 stride: 1 +# weight_filler { type: "gaussian" std: 0.0036 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn/output" +# type: "Concat" +# bottom: "rpn_conv/3x3" +# bottom: "rpn_conv/5x5" +# top: "rpn/output" +#} +#layer { +# name: "rpn_relu/output" +# type: "ReLU" +# bottom: "rpn/output" +# top: "rpn/output" +#} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool_conv5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "roi_pool_conv5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "roi_pool_conv5" + top: "fc6" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + inner_product_param { + num_output: 21 + } +} +layer { + name: 
"bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + inner_product_param { + num_output: 84 + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" + loss_param { + ignore_label: -1 + normalize: true + } +} diff --git a/models/pascal_voc/ZF/faster_rcnn_end2end/train.prototxt b/models/pascal_voc/ZF/faster_rcnn_end2end/train.prototxt new file mode 100644 index 0000000..c044fd5 --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_end2end/train.prototxt @@ -0,0 +1,497 @@ +name: "ZF" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +#layer { +# name: "rpn_conv/3x3" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/3x3" +# param { lr_mult: 1.0 } +# param { lr_mult: 2.0 } +# convolution_param { +# num_output: 192 +# 
kernel_size: 3 pad: 1 stride: 1 +# weight_filler { type: "gaussian" std: 0.01 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn_conv/5x5" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/5x5" +# param { lr_mult: 1.0 } +# param { lr_mult: 2.0 } +# convolution_param { +# num_output: 64 +# kernel_size: 5 pad: 2 stride: 1 +# weight_filler { type: "gaussian" std: 0.0036 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn/output" +# type: "Concat" +# bottom: "rpn_conv/3x3" +# bottom: "rpn_conv/5x5" +# top: "rpn/output" +#} +#layer { +# name: "rpn_relu/output" +# type: "ReLU" +# bottom: "rpn/output" +# top: "rpn/output" +#} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 
21" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool_conv5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "roi_pool_conv5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "roi_pool_conv5" + top: "fc6" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: 'bbox_inside_weights' + bottom: 'bbox_outside_weights' + top: "bbox_loss" + loss_weight: 1 +} diff --git a/tools/README.md b/tools/README.md new file mode 100644 index 0000000..6fd4094 --- /dev/null +++ b/tools/README.md @@ -0,0 +1 @@ +Tools for training, testing, and compressing Fast R-CNN networks. 
diff --git a/tools/_init_paths.py b/tools/_init_paths.py new file mode 100644 index 0000000..f12404c --- /dev/null +++ b/tools/_init_paths.py @@ -0,0 +1,25 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Set up paths for Fast R-CNN.""" + +import os.path as osp +import sys + +def add_path(path): + if path not in sys.path: + sys.path.insert(0, path) + +this_dir = osp.dirname(__file__) + +# Add caffe to PYTHONPATH +caffe_path = osp.join(this_dir, '..', 'caffe', 'python') +add_path(caffe_path) + +# Add lib to PYTHONPATH +lib_path = osp.join(this_dir, '..', 'lib') +add_path(lib_path) diff --git a/tools/compress_net.py b/tools/compress_net.py new file mode 100755 index 0000000..e044e5b --- /dev/null +++ b/tools/compress_net.py @@ -0,0 +1,125 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Compress a Fast R-CNN network using truncated SVD.""" + +import _init_paths +import caffe +import argparse +import numpy as np +import os, sys + +def parse_args(): + """Parse input arguments.""" + parser = argparse.ArgumentParser(description='Compress a Fast R-CNN network') + parser.add_argument('--def', dest='prototxt', + help='prototxt file defining the uncompressed network', + default=None, type=str) + parser.add_argument('--def-svd', dest='prototxt_svd', + help='prototxt file defining the SVD compressed network', + default=None, type=str) + parser.add_argument('--net', dest='caffemodel', + help='model to compress', + default=None, type=str) + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +def compress_weights(W, l): + """Compress the weight matrix W of an inner product (fully connected) layer + using truncated SVD. + + Parameters: + W: N x M weights matrix + l: number of singular values to retain + + Returns: + Ul, L: matrices such that W \approx Ul*L + """ + + # numpy doesn't seem to have a fast truncated SVD algorithm... 
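Before the script's own np.linalg.svd call, a self-contained sketch of the factorization the docstring above describes (W's shape is hypothetical, not taken from a real layer):

import numpy as np
rng = np.random.RandomState(0)
W = rng.randn(512, 2048).astype(np.float32)   # stand-in fully connected weights
l = 256                                       # singular values to retain
U, s, V = np.linalg.svd(W, full_matrices=False)
Ul = U[:, :l]
L = np.dot(np.diag(s[:l]), V[:l, :])
rel_err = np.linalg.norm(W - np.dot(Ul, L)) / np.linalg.norm(W)
print('rank-%d approximation, relative error %.3f' % (l, rel_err))
# One large fc layer is replaced by two smaller ones (fc_L then fc_U),
# which is what compress_net.py installs into fc6_L / fc6_U further below.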
+ # this could be faster + U, s, V = np.linalg.svd(W, full_matrices=False) + + Ul = U[:, :l] + sl = s[:l] + Vl = V[:l, :] + + L = np.dot(np.diag(sl), Vl) + return Ul, L + +def main(): + args = parse_args() + + # prototxt = 'models/VGG16/test.prototxt' + # caffemodel = 'snapshots/vgg16_fast_rcnn_iter_40000.caffemodel' + net = caffe.Net(args.prototxt, args.caffemodel, caffe.TEST) + + # prototxt_svd = 'models/VGG16/svd/test_fc6_fc7.prototxt' + # caffemodel = 'snapshots/vgg16_fast_rcnn_iter_40000.caffemodel' + net_svd = caffe.Net(args.prototxt_svd, args.caffemodel, caffe.TEST) + + print('Uncompressed network {} : {}'.format(args.prototxt, args.caffemodel)) + print('Compressed network prototxt {}'.format(args.prototxt_svd)) + + out = os.path.splitext(os.path.basename(args.caffemodel))[0] + '_svd' + out_dir = os.path.dirname(args.caffemodel) + + # Compress fc6 + if net_svd.params.has_key('fc6_L'): + l_fc6 = net_svd.params['fc6_L'][0].data.shape[0] + print(' fc6_L bottleneck size: {}'.format(l_fc6)) + + # uncompressed weights and biases + W_fc6 = net.params['fc6'][0].data + B_fc6 = net.params['fc6'][1].data + + print(' compressing fc6...') + Ul_fc6, L_fc6 = compress_weights(W_fc6, l_fc6) + + assert(len(net_svd.params['fc6_L']) == 1) + + # install compressed matrix factors (and original biases) + net_svd.params['fc6_L'][0].data[...] = L_fc6 + + net_svd.params['fc6_U'][0].data[...] = Ul_fc6 + net_svd.params['fc6_U'][1].data[...] = B_fc6 + + out += '_fc6_{}'.format(l_fc6) + + # Compress fc7 + if net_svd.params.has_key('fc7_L'): + l_fc7 = net_svd.params['fc7_L'][0].data.shape[0] + print ' fc7_L bottleneck size: {}'.format(l_fc7) + + W_fc7 = net.params['fc7'][0].data + B_fc7 = net.params['fc7'][1].data + + print(' compressing fc7...') + Ul_fc7, L_fc7 = compress_weights(W_fc7, l_fc7) + + assert(len(net_svd.params['fc7_L']) == 1) + + net_svd.params['fc7_L'][0].data[...] = L_fc7 + + net_svd.params['fc7_U'][0].data[...] = Ul_fc7 + net_svd.params['fc7_U'][1].data[...] = B_fc7 + + out += '_fc7_{}'.format(l_fc7) + + filename = '{}/{}.caffemodel'.format(out_dir, out) + net_svd.save(filename) + print 'Wrote svd model to: {:s}'.format(filename) + +if __name__ == '__main__': + main() diff --git a/tools/demo.py b/tools/demo.py new file mode 100755 index 0000000..631c68a --- /dev/null +++ b/tools/demo.py @@ -0,0 +1,151 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +""" +Demo script showing detections in sample images. + +See README.md for installation instructions before running. 
+""" + +import _init_paths +from fast_rcnn.config import cfg +from fast_rcnn.test import im_detect +from fast_rcnn.nms_wrapper import nms +from utils.timer import Timer +import matplotlib.pyplot as plt +import numpy as np +import scipy.io as sio +import caffe, os, sys, cv2 +import argparse + +CLASSES = ('__background__', + 'aeroplane', 'bicycle', 'bird', 'boat', + 'bottle', 'bus', 'car', 'cat', 'chair', + 'cow', 'diningtable', 'dog', 'horse', + 'motorbike', 'person', 'pottedplant', + 'sheep', 'sofa', 'train', 'tvmonitor') + +NETS = {'vgg16': ('VGG16', + 'VGG16_faster_rcnn_final.caffemodel'), + 'zf': ('ZF', + 'ZF_faster_rcnn_final.caffemodel')} + + +def vis_detections(im, class_name, dets, thresh=0.5): + """Draw detected bounding boxes.""" + inds = np.where(dets[:, -1] >= thresh)[0] + if len(inds) == 0: + return + + im = im[:, :, (2, 1, 0)] + fig, ax = plt.subplots(figsize=(12, 12)) + ax.imshow(im, aspect='equal') + for i in inds: + bbox = dets[i, :4] + score = dets[i, -1] + + ax.add_patch( + plt.Rectangle((bbox[0], bbox[1]), + bbox[2] - bbox[0], + bbox[3] - bbox[1], fill=False, + edgecolor='red', linewidth=3.5) + ) + ax.text(bbox[0], bbox[1] - 2, + '{:s} {:.3f}'.format(class_name, score), + bbox=dict(facecolor='blue', alpha=0.5), + fontsize=14, color='white') + + ax.set_title(('{} detections with ' + 'p({} | box) >= {:.1f}').format(class_name, class_name, + thresh), + fontsize=14) + plt.axis('off') + plt.tight_layout() + plt.draw() + +def demo(net, image_name): + """Detect object classes in an image using pre-computed object proposals.""" + + # Load the demo image + im_file = os.path.join(cfg.DATA_DIR, 'demo', image_name) + im = cv2.imread(im_file) + + # Detect all object classes and regress object bounds + timer = Timer() + timer.tic() + scores, boxes = im_detect(net, im) + timer.toc() + print ('Detection took {:.3f}s for ' + '{:d} object proposals').format(timer.total_time, boxes.shape[0]) + + # Visualize detections for each class + CONF_THRESH = 0.8 + NMS_THRESH = 0.3 + for cls_ind, cls in enumerate(CLASSES[1:]): + cls_ind += 1 # because we skipped background + cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)] + cls_scores = scores[:, cls_ind] + dets = np.hstack((cls_boxes, + cls_scores[:, np.newaxis])).astype(np.float32) + keep = nms(dets, NMS_THRESH) + dets = dets[keep, :] + vis_detections(im, cls, dets, thresh=CONF_THRESH) + +def parse_args(): + """Parse input arguments.""" + parser = argparse.ArgumentParser(description='Faster R-CNN demo') + parser.add_argument('--gpu', dest='gpu_id', help='GPU device id to use [0]', + default=0, type=int) + parser.add_argument('--cpu', dest='cpu_mode', + help='Use CPU mode (overrides --gpu)', + action='store_true') + parser.add_argument('--net', dest='demo_net', help='Network to use [vgg16]', + choices=NETS.keys(), default='vgg16') + + args = parser.parse_args() + + return args + +if __name__ == '__main__': + cfg.TEST.HAS_RPN = True # Use RPN for proposals + + args = parse_args() + + prototxt = os.path.join(cfg.MODELS_DIR, NETS[args.demo_net][0], + 'faster_rcnn_alt_opt', 'faster_rcnn_test.pt') + caffemodel = os.path.join(cfg.DATA_DIR, 'faster_rcnn_models', + NETS[args.demo_net][1]) + + if not os.path.isfile(caffemodel): + raise IOError(('{:s} not found.\nDid you run ./data/script/' + 'fetch_faster_rcnn_models.sh?').format(caffemodel)) + + if args.cpu_mode: + caffe.set_mode_cpu() + else: + caffe.set_mode_gpu() + caffe.set_device(args.gpu_id) + cfg.GPU_ID = args.gpu_id + net = caffe.Net(prototxt, caffemodel, caffe.TEST) + + print '\n\nLoaded network 
{:s}'.format(caffemodel) + + # Warmup on a dummy image + im = 128 * np.ones((300, 500, 3), dtype=np.uint8) + for i in xrange(2): + _, _= im_detect(net, im) + + im_names = ['000456.jpg', '000542.jpg', '001150.jpg', + '001763.jpg', '004545.jpg'] + for im_name in im_names: + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + print 'Demo for data/demo/{}'.format(im_name) + demo(net, im_name) + + plt.show() diff --git a/tools/eval_recall.py b/tools/eval_recall.py new file mode 100755 index 0000000..b1a59dc --- /dev/null +++ b/tools/eval_recall.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python + +import _init_paths +from fast_rcnn.config import cfg, cfg_from_file, cfg_from_list +from datasets.factory import get_imdb +import argparse +import time, os, sys +import numpy as np + +def parse_args(): + """ + Parse input arguments + """ + parser = argparse.ArgumentParser(description='Test a Fast R-CNN network') + parser.add_argument('--imdb', dest='imdb_name', + help='dataset to test', + default='voc_2007_test', type=str) + parser.add_argument('--method', dest='method', + help='proposal method', + default='selective_search', type=str) + parser.add_argument('--rpn-file', dest='rpn_file', + default=None, type=str) + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +if __name__ == '__main__': + args = parse_args() + + print('Called with args:') + print(args) + + imdb = get_imdb(args.imdb_name) + imdb.set_proposal_method(args.method) + if args.rpn_file is not None: + imdb.config['rpn_file'] = args.rpn_file + + candidate_boxes = None + if 0: + import scipy.io as sio + filename = 'debug/stage1_rpn_voc_2007_test.mat' + raw_data = sio.loadmat(filename)['aboxes'].ravel() + candidate_boxes = raw_data + + ar, gt_overlaps, recalls, thresholds = \ + imdb.evaluate_recall(candidate_boxes=candidate_boxes) + print 'Method: {}'.format(args.method) + print 'AverageRec: {:.3f}'.format(ar) + + def recall_at(t): + ind = np.where(thresholds > t - 1e-5)[0][0] + assert np.isclose(thresholds[ind], t) + return recalls[ind] + + print 'Recall@0.5: {:.3f}'.format(recall_at(0.5)) + print 'Recall@0.6: {:.3f}'.format(recall_at(0.6)) + print 'Recall@0.7: {:.3f}'.format(recall_at(0.7)) + print 'Recall@0.8: {:.3f}'.format(recall_at(0.8)) + print 'Recall@0.9: {:.3f}'.format(recall_at(0.9)) + # print again for easy spreadsheet copying + print '{:.3f}'.format(ar) + print '{:.3f}'.format(recall_at(0.5)) + print '{:.3f}'.format(recall_at(0.6)) + print '{:.3f}'.format(recall_at(0.7)) + print '{:.3f}'.format(recall_at(0.8)) + print '{:.3f}'.format(recall_at(0.9)) diff --git a/tools/reval.py b/tools/reval.py new file mode 100755 index 0000000..905ec1b --- /dev/null +++ b/tools/reval.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Reval = re-eval. 
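A note on the demo() loop further above: scores carries one column per class and boxes packs four coordinates per class (84 = 4 x 21 columns, matching the bbox_pred layers), so each class is sliced out before NMS. A small sketch with dummy arrays, assuming 300 proposals:

import numpy as np
num_classes = 21
boxes = np.zeros((300, 4 * num_classes), dtype=np.float32)      # per-proposal boxes
scores = np.random.rand(300, num_classes).astype(np.float32)    # per-class scores
cls_ind = 15                                                    # 'person' in CLASSES
cls_boxes = boxes[:, 4 * cls_ind:4 * (cls_ind + 1)]
cls_scores = scores[:, cls_ind]
dets = np.hstack((cls_boxes, cls_scores[:, np.newaxis])).astype(np.float32)
print(dets.shape)   # (300, 5): x1, y1, x2, y2, score -- the format demo() feeds to nms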
Re-evaluate saved detections.""" + +import _init_paths +from fast_rcnn.test import apply_nms +from fast_rcnn.config import cfg +from datasets.factory import get_imdb +import cPickle +import os, sys, argparse +import numpy as np + +def parse_args(): + """ + Parse input arguments + """ + parser = argparse.ArgumentParser(description='Re-evaluate results') + parser.add_argument('output_dir', nargs=1, help='results directory', + type=str) + parser.add_argument('--imdb', dest='imdb_name', + help='dataset to re-evaluate', + default='voc_2007_test', type=str) + parser.add_argument('--matlab', dest='matlab_eval', + help='use matlab for evaluation', + action='store_true') + parser.add_argument('--comp', dest='comp_mode', help='competition mode', + action='store_true') + parser.add_argument('--nms', dest='apply_nms', help='apply nms', + action='store_true') + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +def from_dets(imdb_name, output_dir, args): + imdb = get_imdb(imdb_name) + imdb.competition_mode(args.comp_mode) + imdb.config['matlab_eval'] = args.matlab_eval + with open(os.path.join(output_dir, 'detections.pkl'), 'rb') as f: + dets = cPickle.load(f) + + if args.apply_nms: + print 'Applying NMS to all detections' + nms_dets = apply_nms(dets, cfg.TEST.NMS) + else: + nms_dets = dets + + print 'Evaluating detections' + imdb.evaluate_detections(nms_dets, output_dir) + +if __name__ == '__main__': + args = parse_args() + + output_dir = os.path.abspath(args.output_dir[0]) + imdb_name = args.imdb_name + from_dets(imdb_name, output_dir, args) diff --git a/tools/rpn_generate.py b/tools/rpn_generate.py new file mode 100755 index 0000000..f8ca4a1 --- /dev/null +++ b/tools/rpn_generate.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Fast/er/ R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Generate RPN proposals.""" + +import _init_paths +import numpy as np +from fast_rcnn.config import cfg, cfg_from_file, cfg_from_list, get_output_dir +from datasets.factory import get_imdb +from rpn.generate import imdb_proposals +import cPickle +import caffe +import argparse +import pprint +import time, os, sys + +def parse_args(): + """ + Parse input arguments + """ + parser = argparse.ArgumentParser(description='Test a Fast R-CNN network') + parser.add_argument('--gpu', dest='gpu_id', help='GPU id to use', + default=0, type=int) + parser.add_argument('--def', dest='prototxt', + help='prototxt file defining the network', + default=None, type=str) + parser.add_argument('--net', dest='caffemodel', + help='model to test', + default=None, type=str) + parser.add_argument('--cfg', dest='cfg_file', + help='optional config file', default=None, type=str) + parser.add_argument('--wait', dest='wait', + help='wait until net file exists', + default=True, type=bool) + parser.add_argument('--imdb', dest='imdb_name', + help='dataset to test', + default='voc_2007_test', type=str) + parser.add_argument('--set', dest='set_cfgs', + help='set config keys', default=None, + nargs=argparse.REMAINDER) + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +if __name__ == '__main__': + args = parse_args() + + print('Called with args:') + print(args) + + if args.cfg_file is not None: + cfg_from_file(args.cfg_file) + if args.set_cfgs is 
not None: + cfg_from_list(args.set_cfgs) + + cfg.GPU_ID = args.gpu_id + + # RPN test settings + cfg.TEST.RPN_PRE_NMS_TOP_N = -1 + cfg.TEST.RPN_POST_NMS_TOP_N = 2000 + + print('Using config:') + pprint.pprint(cfg) + + while not os.path.exists(args.caffemodel) and args.wait: + print('Waiting for {} to exist...'.format(args.caffemodel)) + time.sleep(10) + + caffe.set_mode_gpu() + caffe.set_device(args.gpu_id) + net = caffe.Net(args.prototxt, args.caffemodel, caffe.TEST) + net.name = os.path.splitext(os.path.basename(args.caffemodel))[0] + + imdb = get_imdb(args.imdb_name) + imdb_boxes = imdb_proposals(net, imdb) + + output_dir = get_output_dir(imdb, net) + rpn_file = os.path.join(output_dir, net.name + '_rpn_proposals.pkl') + with open(rpn_file, 'wb') as f: + cPickle.dump(imdb_boxes, f, cPickle.HIGHEST_PROTOCOL) + print 'Wrote RPN proposals to {}'.format(rpn_file) diff --git a/tools/test_net.py b/tools/test_net.py new file mode 100755 index 0000000..de4f12b --- /dev/null +++ b/tools/test_net.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Test a Fast R-CNN network on an image database.""" + +import _init_paths +from fast_rcnn.test import test_net +from fast_rcnn.config import cfg, cfg_from_file, cfg_from_list +from datasets.factory import get_imdb +import caffe +import argparse +import pprint +import time, os, sys + +def parse_args(): + """ + Parse input arguments + """ + parser = argparse.ArgumentParser(description='Test a Fast R-CNN network') + parser.add_argument('--gpu', dest='gpu_id', help='GPU id to use', + default=0, type=int) + parser.add_argument('--def', dest='prototxt', + help='prototxt file defining the network', + default=None, type=str) + parser.add_argument('--net', dest='caffemodel', + help='model to test', + default=None, type=str) + parser.add_argument('--cfg', dest='cfg_file', + help='optional config file', default=None, type=str) + parser.add_argument('--wait', dest='wait', + help='wait until net file exists', + default=True, type=bool) + parser.add_argument('--imdb', dest='imdb_name', + help='dataset to test', + default='voc_2007_test', type=str) + parser.add_argument('--comp', dest='comp_mode', help='competition mode', + action='store_true') + parser.add_argument('--set', dest='set_cfgs', + help='set config keys', default=None, + nargs=argparse.REMAINDER) + parser.add_argument('--vis', dest='vis', help='visualize detections', + action='store_true') + parser.add_argument('--num_dets', dest='max_per_image', + help='max number of detections per image', + default=100, type=int) + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +if __name__ == '__main__': + args = parse_args() + + print('Called with args:') + print(args) + + if args.cfg_file is not None: + cfg_from_file(args.cfg_file) + if args.set_cfgs is not None: + cfg_from_list(args.set_cfgs) + + cfg.GPU_ID = args.gpu_id + + print('Using config:') + pprint.pprint(cfg) + + while not os.path.exists(args.caffemodel) and args.wait: + print('Waiting for {} to exist...'.format(args.caffemodel)) + time.sleep(10) + + caffe.set_mode_gpu() + caffe.set_device(args.gpu_id) + net = caffe.Net(args.prototxt, args.caffemodel, caffe.TEST) + net.name = os.path.splitext(os.path.basename(args.caffemodel))[0] + + imdb = 
get_imdb(args.imdb_name) + imdb.competition_mode(args.comp_mode) + if not cfg.TEST.HAS_RPN: + imdb.set_proposal_method(cfg.TEST.PROPOSAL_METHOD) + + test_net(net, imdb, max_per_image=args.max_per_image, vis=args.vis) diff --git a/tools/train_faster_rcnn_alt_opt.py b/tools/train_faster_rcnn_alt_opt.py new file mode 100755 index 0000000..e49844a --- /dev/null +++ b/tools/train_faster_rcnn_alt_opt.py @@ -0,0 +1,334 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Train a Faster R-CNN network using alternating optimization. +This tool implements the alternating optimization algorithm described in our +NIPS 2015 paper ("Faster R-CNN: Towards Real-time Object Detection with Region +Proposal Networks." Shaoqing Ren, Kaiming He, Ross Girshick, Jian Sun.) +""" + +import _init_paths +from fast_rcnn.train import get_training_roidb, train_net +from fast_rcnn.config import cfg, cfg_from_file, cfg_from_list, get_output_dir +from datasets.factory import get_imdb +from rpn.generate import imdb_proposals +import argparse +import pprint +import numpy as np +import sys, os +import multiprocessing as mp +import cPickle +import shutil + +def parse_args(): + """ + Parse input arguments + """ + parser = argparse.ArgumentParser(description='Train a Faster R-CNN network') + parser.add_argument('--gpu', dest='gpu_id', + help='GPU device id to use [0]', + default=0, type=int) + parser.add_argument('--net_name', dest='net_name', + help='network name (e.g., "ZF")', + default=None, type=str) + parser.add_argument('--weights', dest='pretrained_model', + help='initialize with pretrained model weights', + default=None, type=str) + parser.add_argument('--cfg', dest='cfg_file', + help='optional config file', + default=None, type=str) + parser.add_argument('--imdb', dest='imdb_name', + help='dataset to train on', + default='voc_2007_trainval', type=str) + parser.add_argument('--set', dest='set_cfgs', + help='set config keys', default=None, + nargs=argparse.REMAINDER) + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +def get_roidb(imdb_name, rpn_file=None): + imdb = get_imdb(imdb_name) + print 'Loaded dataset `{:s}` for training'.format(imdb.name) + imdb.set_proposal_method(cfg.TRAIN.PROPOSAL_METHOD) + print 'Set proposal method: {:s}'.format(cfg.TRAIN.PROPOSAL_METHOD) + if rpn_file is not None: + imdb.config['rpn_file'] = rpn_file + roidb = get_training_roidb(imdb) + return roidb, imdb + +def get_solvers(net_name): + # Faster R-CNN Alternating Optimization + n = 'faster_rcnn_alt_opt' + # Solver for each training stage + solvers = [[net_name, n, 'stage1_rpn_solver60k80k.pt'], + [net_name, n, 'stage1_fast_rcnn_solver30k40k.pt'], + [net_name, n, 'stage2_rpn_solver60k80k.pt'], + [net_name, n, 'stage2_fast_rcnn_solver30k40k.pt']] + solvers = [os.path.join(cfg.MODELS_DIR, *s) for s in solvers] + # Iterations for each training stage + max_iters = [80000, 40000, 80000, 40000] + # max_iters = [100, 100, 100, 100] + # Test prototxt for the RPN + rpn_test_prototxt = os.path.join( + cfg.MODELS_DIR, net_name, n, 'rpn_test.pt') + return solvers, max_iters, rpn_test_prototxt + +# ------------------------------------------------------------------------------ +# Pycaffe doesn't reliably free GPU memory when instantiated nets are discarded +# 
(e.g. "del net" in Python code). To work around this issue, each training +# stage is executed in a separate process using multiprocessing.Process. +# ------------------------------------------------------------------------------ + +def _init_caffe(cfg): + """Initialize pycaffe in a training process. + """ + + import caffe + # fix the random seeds (numpy and caffe) for reproducibility + np.random.seed(cfg.RNG_SEED) + caffe.set_random_seed(cfg.RNG_SEED) + # set up caffe + caffe.set_mode_gpu() + caffe.set_device(cfg.GPU_ID) + +def train_rpn(queue=None, imdb_name=None, init_model=None, solver=None, + max_iters=None, cfg=None): + """Train a Region Proposal Network in a separate training process. + """ + + # Not using any proposals, just ground-truth boxes + cfg.TRAIN.HAS_RPN = True + cfg.TRAIN.BBOX_REG = False # applies only to Fast R-CNN bbox regression + cfg.TRAIN.PROPOSAL_METHOD = 'gt' + cfg.TRAIN.IMS_PER_BATCH = 1 + print 'Init model: {}'.format(init_model) + print('Using config:') + pprint.pprint(cfg) + + import caffe + _init_caffe(cfg) + + roidb, imdb = get_roidb(imdb_name) + print 'roidb len: {}'.format(len(roidb)) + output_dir = get_output_dir(imdb) + print 'Output will be saved to `{:s}`'.format(output_dir) + + model_paths = train_net(solver, roidb, output_dir, + pretrained_model=init_model, + max_iters=max_iters) + # Cleanup all but the final model + for i in model_paths[:-1]: + os.remove(i) + rpn_model_path = model_paths[-1] + # Send final model path through the multiprocessing queue + queue.put({'model_path': rpn_model_path}) + +def rpn_generate(queue=None, imdb_name=None, rpn_model_path=None, cfg=None, + rpn_test_prototxt=None): + """Use a trained RPN to generate proposals. + """ + + cfg.TEST.RPN_PRE_NMS_TOP_N = -1 # no pre NMS filtering + cfg.TEST.RPN_POST_NMS_TOP_N = 2000 # limit top boxes after NMS + print 'RPN model: {}'.format(rpn_model_path) + print('Using config:') + pprint.pprint(cfg) + + import caffe + _init_caffe(cfg) + + # NOTE: the matlab implementation computes proposals on flipped images, too. + # We compute them on the image once and then flip the already computed + # proposals. This might cause a minor loss in mAP (less proposal jittering). + imdb = get_imdb(imdb_name) + print 'Loaded dataset `{:s}` for proposal generation'.format(imdb.name) + + # Load RPN and configure output directory + rpn_net = caffe.Net(rpn_test_prototxt, rpn_model_path, caffe.TEST) + output_dir = get_output_dir(imdb) + print 'Output will be saved to `{:s}`'.format(output_dir) + # Generate proposals on the imdb + rpn_proposals = imdb_proposals(rpn_net, imdb) + # Write proposals to disk and send the proposal file path through the + # multiprocessing queue + rpn_net_name = os.path.splitext(os.path.basename(rpn_model_path))[0] + rpn_proposals_path = os.path.join( + output_dir, rpn_net_name + '_proposals.pkl') + with open(rpn_proposals_path, 'wb') as f: + cPickle.dump(rpn_proposals, f, cPickle.HIGHEST_PROTOCOL) + print 'Wrote RPN proposals to {}'.format(rpn_proposals_path) + queue.put({'proposal_path': rpn_proposals_path}) + +def train_fast_rcnn(queue=None, imdb_name=None, init_model=None, solver=None, + max_iters=None, cfg=None, rpn_file=None): + """Train a Fast R-CNN using proposals generated by an RPN. 
+ """ + + cfg.TRAIN.HAS_RPN = False # not generating prosals on-the-fly + cfg.TRAIN.PROPOSAL_METHOD = 'rpn' # use pre-computed RPN proposals instead + cfg.TRAIN.IMS_PER_BATCH = 2 + print 'Init model: {}'.format(init_model) + print 'RPN proposals: {}'.format(rpn_file) + print('Using config:') + pprint.pprint(cfg) + + import caffe + _init_caffe(cfg) + + roidb, imdb = get_roidb(imdb_name, rpn_file=rpn_file) + output_dir = get_output_dir(imdb) + print 'Output will be saved to `{:s}`'.format(output_dir) + # Train Fast R-CNN + model_paths = train_net(solver, roidb, output_dir, + pretrained_model=init_model, + max_iters=max_iters) + # Cleanup all but the final model + for i in model_paths[:-1]: + os.remove(i) + fast_rcnn_model_path = model_paths[-1] + # Send Fast R-CNN model path over the multiprocessing queue + queue.put({'model_path': fast_rcnn_model_path}) + +if __name__ == '__main__': + args = parse_args() + + print('Called with args:') + print(args) + + if args.cfg_file is not None: + cfg_from_file(args.cfg_file) + if args.set_cfgs is not None: + cfg_from_list(args.set_cfgs) + cfg.GPU_ID = args.gpu_id + + # -------------------------------------------------------------------------- + # Pycaffe doesn't reliably free GPU memory when instantiated nets are + # discarded (e.g. "del net" in Python code). To work around this issue, each + # training stage is executed in a separate process using + # multiprocessing.Process. + # -------------------------------------------------------------------------- + + # queue for communicated results between processes + mp_queue = mp.Queue() + # solves, iters, etc. for each training stage + solvers, max_iters, rpn_test_prototxt = get_solvers(args.net_name) + + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + print 'Stage 1 RPN, init from ImageNet model' + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + + cfg.TRAIN.SNAPSHOT_INFIX = 'stage1' + mp_kwargs = dict( + queue=mp_queue, + imdb_name=args.imdb_name, + init_model=args.pretrained_model, + solver=solvers[0], + max_iters=max_iters[0], + cfg=cfg) + p = mp.Process(target=train_rpn, kwargs=mp_kwargs) + p.start() + rpn_stage1_out = mp_queue.get() + p.join() + + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + print 'Stage 1 RPN, generate proposals' + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + + mp_kwargs = dict( + queue=mp_queue, + imdb_name=args.imdb_name, + rpn_model_path=str(rpn_stage1_out['model_path']), + cfg=cfg, + rpn_test_prototxt=rpn_test_prototxt) + p = mp.Process(target=rpn_generate, kwargs=mp_kwargs) + p.start() + rpn_stage1_out['proposal_path'] = mp_queue.get()['proposal_path'] + p.join() + + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + print 'Stage 1 Fast R-CNN using RPN proposals, init from ImageNet model' + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + + cfg.TRAIN.SNAPSHOT_INFIX = 'stage1' + mp_kwargs = dict( + queue=mp_queue, + imdb_name=args.imdb_name, + init_model=args.pretrained_model, + solver=solvers[1], + max_iters=max_iters[1], + cfg=cfg, + rpn_file=rpn_stage1_out['proposal_path']) + p = mp.Process(target=train_fast_rcnn, kwargs=mp_kwargs) + p.start() + fast_rcnn_stage1_out = mp_queue.get() + p.join() + + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + print 'Stage 2 RPN, init from stage 1 Fast R-CNN model' + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + + cfg.TRAIN.SNAPSHOT_INFIX = 'stage2' + mp_kwargs = dict( + 
queue=mp_queue, + imdb_name=args.imdb_name, + init_model=str(fast_rcnn_stage1_out['model_path']), + solver=solvers[2], + max_iters=max_iters[2], + cfg=cfg) + p = mp.Process(target=train_rpn, kwargs=mp_kwargs) + p.start() + rpn_stage2_out = mp_queue.get() + p.join() + + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + print 'Stage 2 RPN, generate proposals' + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + + mp_kwargs = dict( + queue=mp_queue, + imdb_name=args.imdb_name, + rpn_model_path=str(rpn_stage2_out['model_path']), + cfg=cfg, + rpn_test_prototxt=rpn_test_prototxt) + p = mp.Process(target=rpn_generate, kwargs=mp_kwargs) + p.start() + rpn_stage2_out['proposal_path'] = mp_queue.get()['proposal_path'] + p.join() + + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + print 'Stage 2 Fast R-CNN, init from stage 2 RPN R-CNN model' + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + + cfg.TRAIN.SNAPSHOT_INFIX = 'stage2' + mp_kwargs = dict( + queue=mp_queue, + imdb_name=args.imdb_name, + init_model=str(rpn_stage2_out['model_path']), + solver=solvers[3], + max_iters=max_iters[3], + cfg=cfg, + rpn_file=rpn_stage2_out['proposal_path']) + p = mp.Process(target=train_fast_rcnn, kwargs=mp_kwargs) + p.start() + fast_rcnn_stage2_out = mp_queue.get() + p.join() + + # Create final model (just a copy of the last stage) + final_path = os.path.join( + os.path.dirname(fast_rcnn_stage2_out['model_path']), + args.net_name + '_faster_rcnn_final.caffemodel') + print 'cp {} -> {}'.format( + fast_rcnn_stage2_out['model_path'], final_path) + shutil.copy(fast_rcnn_stage2_out['model_path'], final_path) + print 'Final model: {}'.format(final_path) diff --git a/tools/train_net.py b/tools/train_net.py new file mode 100755 index 0000000..622a95d --- /dev/null +++ b/tools/train_net.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Train a Fast R-CNN network on a region of interest database.""" + +import _init_paths +from fast_rcnn.train import get_training_roidb, train_net +from fast_rcnn.config import cfg, cfg_from_file, cfg_from_list, get_output_dir +from datasets.factory import get_imdb +import datasets.imdb +import caffe +import argparse +import pprint +import numpy as np +import sys + +def parse_args(): + """ + Parse input arguments + """ + parser = argparse.ArgumentParser(description='Train a Fast R-CNN network') + parser.add_argument('--gpu', dest='gpu_id', + help='GPU device id to use [0]', + default=0, type=int) + parser.add_argument('--solver', dest='solver', + help='solver prototxt', + default=None, type=str) + parser.add_argument('--iters', dest='max_iters', + help='number of iterations to train', + default=40000, type=int) + parser.add_argument('--weights', dest='pretrained_model', + help='initialize with pretrained model weights', + default=None, type=str) + parser.add_argument('--cfg', dest='cfg_file', + help='optional config file', + default=None, type=str) + parser.add_argument('--imdb', dest='imdb_name', + help='dataset to train on', + default='voc_2007_trainval', type=str) + parser.add_argument('--rand', dest='randomize', + help='randomize (do not use a fixed seed)', + action='store_true') + parser.add_argument('--set', dest='set_cfgs', + help='set config keys', 
default=None, + nargs=argparse.REMAINDER) + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +def combined_roidb(imdb_names): + def get_roidb(imdb_name): + imdb = get_imdb(imdb_name) + print 'Loaded dataset `{:s}` for training'.format(imdb.name) + imdb.set_proposal_method(cfg.TRAIN.PROPOSAL_METHOD) + print 'Set proposal method: {:s}'.format(cfg.TRAIN.PROPOSAL_METHOD) + roidb = get_training_roidb(imdb) + return roidb + + roidbs = [get_roidb(s) for s in imdb_names.split('+')] + roidb = roidbs[0] + if len(roidbs) > 1: + for r in roidbs[1:]: + roidb.extend(r) + imdb = datasets.imdb.imdb(imdb_names) + else: + imdb = get_imdb(imdb_names) + return imdb, roidb + +if __name__ == '__main__': + args = parse_args() + + print('Called with args:') + print(args) + + if args.cfg_file is not None: + cfg_from_file(args.cfg_file) + if args.set_cfgs is not None: + cfg_from_list(args.set_cfgs) + + cfg.GPU_ID = args.gpu_id + + print('Using config:') + pprint.pprint(cfg) + + if not args.randomize: + # fix the random seeds (numpy and caffe) for reproducibility + np.random.seed(cfg.RNG_SEED) + caffe.set_random_seed(cfg.RNG_SEED) + + # set up caffe + caffe.set_mode_gpu() + caffe.set_device(args.gpu_id) + + imdb, roidb = combined_roidb(args.imdb_name) + print '{:d} roidb entries'.format(len(roidb)) + + output_dir = get_output_dir(imdb) + print 'Output will be saved to `{:s}`'.format(output_dir) + + train_net(args.solver, roidb, output_dir, + pretrained_model=args.pretrained_model, + max_iters=args.max_iters) diff --git a/tools/train_svms.py b/tools/train_svms.py new file mode 100755 index 0000000..498bbf2 --- /dev/null +++ b/tools/train_svms.py @@ -0,0 +1,353 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +""" +Train post-hoc SVMs using the algorithm and hyper-parameters from +traditional R-CNN. +""" + +import _init_paths +from fast_rcnn.config import cfg, cfg_from_file +from datasets.factory import get_imdb +from fast_rcnn.test import im_detect +from utils.timer import Timer +import caffe +import argparse +import pprint +import numpy as np +import numpy.random as npr +import cv2 +from sklearn import svm +import os, sys + +class SVMTrainer(object): + """ + Trains post-hoc detection SVMs for all classes using the algorithm + and hyper-parameters of traditional R-CNN. 
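combined_roidb in train_net.py above accepts several dataset names joined with '+', loads each roidb, and concatenates them into one training set. A tiny sketch of that merge (the loader and the 'a'/'b' names are made up for illustration):

def combine(imdb_names, load_one):
    roidbs = [load_one(name) for name in imdb_names.split('+')]
    merged = roidbs[0]
    for extra in roidbs[1:]:
        merged.extend(extra)
    return merged

print(combine('a+b', lambda name: [name + '_roi_0', name + '_roi_1']))
# ['a_roi_0', 'a_roi_1', 'b_roi_0', 'b_roi_1']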
+ """ + + def __init__(self, net, imdb): + self.imdb = imdb + self.net = net + self.layer = 'fc7' + self.hard_thresh = -1.0001 + self.neg_iou_thresh = 0.3 + + dim = net.params['cls_score'][0].data.shape[1] + scale = self._get_feature_scale() + print('Feature dim: {}'.format(dim)) + print('Feature scale: {:.3f}'.format(scale)) + self.trainers = [SVMClassTrainer(cls, dim, feature_scale=scale) + for cls in imdb.classes] + + def _get_feature_scale(self, num_images=100): + TARGET_NORM = 20.0 # Magic value from traditional R-CNN + _t = Timer() + roidb = self.imdb.roidb + total_norm = 0.0 + count = 0.0 + inds = npr.choice(xrange(self.imdb.num_images), size=num_images, + replace=False) + for i_, i in enumerate(inds): + im = cv2.imread(self.imdb.image_path_at(i)) + if roidb[i]['flipped']: + im = im[:, ::-1, :] + _t.tic() + scores, boxes = im_detect(self.net, im, roidb[i]['boxes']) + _t.toc() + feat = self.net.blobs[self.layer].data + total_norm += np.sqrt((feat ** 2).sum(axis=1)).sum() + count += feat.shape[0] + print('{}/{}: avg feature norm: {:.3f}'.format(i_ + 1, num_images, + total_norm / count)) + + return TARGET_NORM * 1.0 / (total_norm / count) + + def _get_pos_counts(self): + counts = np.zeros((len(self.imdb.classes)), dtype=np.int) + roidb = self.imdb.roidb + for i in xrange(len(roidb)): + for j in xrange(1, self.imdb.num_classes): + I = np.where(roidb[i]['gt_classes'] == j)[0] + counts[j] += len(I) + + for j in xrange(1, self.imdb.num_classes): + print('class {:s} has {:d} positives'. + format(self.imdb.classes[j], counts[j])) + + return counts + + def get_pos_examples(self): + counts = self._get_pos_counts() + for i in xrange(len(counts)): + self.trainers[i].alloc_pos(counts[i]) + + _t = Timer() + roidb = self.imdb.roidb + num_images = len(roidb) + # num_images = 100 + for i in xrange(num_images): + im = cv2.imread(self.imdb.image_path_at(i)) + if roidb[i]['flipped']: + im = im[:, ::-1, :] + gt_inds = np.where(roidb[i]['gt_classes'] > 0)[0] + gt_boxes = roidb[i]['boxes'][gt_inds] + _t.tic() + scores, boxes = im_detect(self.net, im, gt_boxes) + _t.toc() + feat = self.net.blobs[self.layer].data + for j in xrange(1, self.imdb.num_classes): + cls_inds = np.where(roidb[i]['gt_classes'][gt_inds] == j)[0] + if len(cls_inds) > 0: + cls_feat = feat[cls_inds, :] + self.trainers[j].append_pos(cls_feat) + + print 'get_pos_examples: {:d}/{:d} {:.3f}s' \ + .format(i + 1, len(roidb), _t.average_time) + + def initialize_net(self): + # Start all SVM parameters at zero + self.net.params['cls_score'][0].data[...] = 0 + self.net.params['cls_score'][1].data[...] = 0 + + # Initialize SVMs in a smart way. Not doing this because its such + # a good initialization that we might not learn something close to + # the SVM solution. 
+# # subtract background weights and biases for the foreground classes +# w_bg = self.net.params['cls_score'][0].data[0, :] +# b_bg = self.net.params['cls_score'][1].data[0] +# self.net.params['cls_score'][0].data[1:, :] -= w_bg +# self.net.params['cls_score'][1].data[1:] -= b_bg +# # set the background weights and biases to 0 (where they shall remain) +# self.net.params['cls_score'][0].data[0, :] = 0 +# self.net.params['cls_score'][1].data[0] = 0 + + def update_net(self, cls_ind, w, b): + self.net.params['cls_score'][0].data[cls_ind, :] = w + self.net.params['cls_score'][1].data[cls_ind] = b + + def train_with_hard_negatives(self): + _t = Timer() + roidb = self.imdb.roidb + num_images = len(roidb) + # num_images = 100 + for i in xrange(num_images): + im = cv2.imread(self.imdb.image_path_at(i)) + if roidb[i]['flipped']: + im = im[:, ::-1, :] + _t.tic() + scores, boxes = im_detect(self.net, im, roidb[i]['boxes']) + _t.toc() + feat = self.net.blobs[self.layer].data + for j in xrange(1, self.imdb.num_classes): + hard_inds = \ + np.where((scores[:, j] > self.hard_thresh) & + (roidb[i]['gt_overlaps'][:, j].toarray().ravel() < + self.neg_iou_thresh))[0] + if len(hard_inds) > 0: + hard_feat = feat[hard_inds, :].copy() + new_w_b = \ + self.trainers[j].append_neg_and_retrain(feat=hard_feat) + if new_w_b is not None: + self.update_net(j, new_w_b[0], new_w_b[1]) + + print(('train_with_hard_negatives: ' + '{:d}/{:d} {:.3f}s').format(i + 1, len(roidb), + _t.average_time)) + + def train(self): + # Initialize SVMs using + # a. w_i = fc8_w_i - fc8_w_0 + # b. b_i = fc8_b_i - fc8_b_0 + # c. Install SVMs into net + self.initialize_net() + + # Pass over roidb to count num positives for each class + # a. Pre-allocate arrays for positive feature vectors + # Pass over roidb, computing features for positives only + self.get_pos_examples() + + # Pass over roidb + # a. Compute cls_score with forward pass + # b. For each class + # i. Select hard negatives + # ii. Add them to cache + # c. For each class + # i. If SVM retrain criteria met, update SVM + # ii. Install new SVM into net + self.train_with_hard_negatives() + + # One final SVM retraining for each class + # Install SVMs into net + for j in xrange(1, self.imdb.num_classes): + new_w_b = self.trainers[j].append_neg_and_retrain(force=True) + self.update_net(j, new_w_b[0], new_w_b[1]) + +class SVMClassTrainer(object): + """Manages post-hoc SVM training for a single object class.""" + + def __init__(self, cls, dim, feature_scale=1.0, + C=0.001, B=10.0, pos_weight=2.0): + self.pos = np.zeros((0, dim), dtype=np.float32) + self.neg = np.zeros((0, dim), dtype=np.float32) + self.B = B + self.C = C + self.cls = cls + self.pos_weight = pos_weight + self.dim = dim + self.feature_scale = feature_scale + self.svm = svm.LinearSVC(C=C, class_weight={1: 2, -1: 1}, + intercept_scaling=B, verbose=1, + penalty='l2', loss='l1', + random_state=cfg.RNG_SEED, dual=True) + self.pos_cur = 0 + self.num_neg_added = 0 + self.retrain_limit = 2000 + self.evict_thresh = -1.1 + self.loss_history = [] + + def alloc_pos(self, count): + self.pos_cur = 0 + self.pos = np.zeros((count, self.dim), dtype=np.float32) + + def append_pos(self, feat): + num = feat.shape[0] + self.pos[self.pos_cur:self.pos_cur + num, :] = feat + self.pos_cur += num + + def train(self): + print('>>> Updating {} detector <<<'.format(self.cls)) + num_pos = self.pos.shape[0] + num_neg = self.neg.shape[0] + print('Cache holds {} pos examples and {} neg examples'. 
+ format(num_pos, num_neg)) + X = np.vstack((self.pos, self.neg)) * self.feature_scale + y = np.hstack((np.ones(num_pos), + -np.ones(num_neg))) + self.svm.fit(X, y) + w = self.svm.coef_ + b = self.svm.intercept_[0] + scores = self.svm.decision_function(X) + pos_scores = scores[:num_pos] + neg_scores = scores[num_pos:] + + pos_loss = (self.C * self.pos_weight * + np.maximum(0, 1 - pos_scores).sum()) + neg_loss = self.C * np.maximum(0, 1 + neg_scores).sum() + reg_loss = 0.5 * np.dot(w.ravel(), w.ravel()) + 0.5 * b ** 2 + tot_loss = pos_loss + neg_loss + reg_loss + self.loss_history.append((tot_loss, pos_loss, neg_loss, reg_loss)) + + for i, losses in enumerate(self.loss_history): + print((' {:d}: obj val: {:.3f} = {:.3f} ' + '(pos) + {:.3f} (neg) + {:.3f} (reg)').format(i, *losses)) + + # Sanity check + scores_ret = ( + X * 1.0 / self.feature_scale).dot(w.T * self.feature_scale) + b + assert np.allclose(scores, scores_ret[:, 0], atol=1e-5), \ + "Scores from returned model don't match decision function" + + return ((w * self.feature_scale, b), pos_scores, neg_scores) + + def append_neg_and_retrain(self, feat=None, force=False): + if feat is not None: + num = feat.shape[0] + self.neg = np.vstack((self.neg, feat)) + self.num_neg_added += num + if self.num_neg_added > self.retrain_limit or force: + self.num_neg_added = 0 + new_w_b, pos_scores, neg_scores = self.train() + # scores = np.dot(self.neg, new_w_b[0].T) + new_w_b[1] + # easy_inds = np.where(neg_scores < self.evict_thresh)[0] + not_easy_inds = np.where(neg_scores >= self.evict_thresh)[0] + if len(not_easy_inds) > 0: + self.neg = self.neg[not_easy_inds, :] + # self.neg = np.delete(self.neg, easy_inds) + print(' Pruning easy negatives') + print(' Cache holds {} pos examples and {} neg examples'. + format(self.pos.shape[0], self.neg.shape[0])) + print(' {} pos support vectors'.format((pos_scores <= 1).sum())) + print(' {} neg support vectors'.format((neg_scores >= -1).sum())) + return new_w_b + else: + return None + +def parse_args(): + """ + Parse input arguments + """ + parser = argparse.ArgumentParser(description='Train SVMs (old skool)') + parser.add_argument('--gpu', dest='gpu_id', help='GPU device id to use [0]', + default=0, type=int) + parser.add_argument('--def', dest='prototxt', + help='prototxt file defining the network', + default=None, type=str) + parser.add_argument('--net', dest='caffemodel', + help='model to test', + default=None, type=str) + parser.add_argument('--cfg', dest='cfg_file', + help='optional config file', default=None, type=str) + parser.add_argument('--imdb', dest='imdb_name', + help='dataset to train on', + default='voc_2007_trainval', type=str) + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +if __name__ == '__main__': + # Must turn this off to prevent issues when digging into the net blobs to + # pull out features (tricky!) 
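+ # (box de-duplication in im_detect() would otherwise break the one-to-one
+ # correspondence between the boxes we pass in and the rows of the feature blob)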
+ cfg.DEDUP_BOXES = 0 + + # Must turn this on because we use the test im_detect() method to harvest + # hard negatives + cfg.TEST.SVM = True + + args = parse_args() + + print('Called with args:') + print(args) + + if args.cfg_file is not None: + cfg_from_file(args.cfg_file) + + print('Using config:') + pprint.pprint(cfg) + + # fix the random seed for reproducibility + np.random.seed(cfg.RNG_SEED) + + # set up caffe + caffe.set_mode_gpu() + if args.gpu_id is not None: + caffe.set_device(args.gpu_id) + net = caffe.Net(args.prototxt, args.caffemodel, caffe.TEST) + net.name = os.path.splitext(os.path.basename(args.caffemodel))[0] + out = os.path.splitext(os.path.basename(args.caffemodel))[0] + '_svm' + out_dir = os.path.dirname(args.caffemodel) + + imdb = get_imdb(args.imdb_name) + print 'Loaded dataset `{:s}` for training'.format(imdb.name) + + # enhance roidb to contain flipped examples + if cfg.TRAIN.USE_FLIPPED: + print 'Appending horizontally-flipped training examples...' + imdb.append_flipped_images() + print 'done' + + SVMTrainer(net, imdb).train() + + filename = '{}/{}.caffemodel'.format(out_dir, out) + net.save(filename) + print 'Wrote svm model to: {:s}'.format(filename)
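Aside: the objective value that SVMClassTrainer.train() logs for each loss_history entry is the weighted, L2-regularized hinge loss that LinearSVC minimizes. Below is a minimal sketch of that decomposition, assuming synthetic stand-ins for the cached fc7 features and the fitted model; C, pos_weight and the feature-scale handling mirror the defaults above, while the feature dimension, sample counts, weights and the 0.05 scale are placeholders (the real scale comes from _get_feature_scale()).

import numpy as np

# Illustrative stand-ins; only C, pos_weight and the scaling scheme follow the code above.
C, pos_weight, feature_scale = 0.001, 2.0, 0.05
rng = np.random.RandomState(0)
pos = rng.randn(20, 4096).astype(np.float32)   # cached positive fc7 features
neg = rng.randn(80, 4096).astype(np.float32)   # cached hard-negative fc7 features
w = rng.randn(4096) * 1e-3                     # candidate SVM weights (svm.coef_)
b = 0.0                                        # candidate bias (svm.intercept_)

X = np.vstack((pos, neg)) * feature_scale
scores = X.dot(w) + b

# Same decomposition as each loss_history entry: pos hinge + neg hinge + regularizer
pos_loss = C * pos_weight * np.maximum(0, 1 - scores[:len(pos)]).sum()
neg_loss = C * np.maximum(0, 1 + scores[len(pos):]).sum()
reg_loss = 0.5 * w.dot(w) + 0.5 * b ** 2
print('obj val: {:.3f} = {:.3f} (pos) + {:.3f} (neg) + {:.3f} (reg)'.format(
    pos_loss + neg_loss + reg_loss, pos_loss, neg_loss, reg_loss))

Because the SVM is fit on features multiplied by feature_scale, train() returns (w * feature_scale, b), so the weights installed by update_net() score raw fc7 activations directly; that is what the np.allclose() sanity check in train() verifies.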